+ ./ya make . -T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --build relwithdebinfo -DDEBUGINFO_LINES_ONLY --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.s1s88tmSmm --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/ya_evlog.jsonl --junit /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/junit.xml --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out
Output root is subdirectory of Arcadia root, this may cause non-idempotent build
Configuring dependencies for platform default-linux-x86_64-relwithdebinfo
Configuring dependencies for platform tools
[2 ymakes processing] [8362/8362 modules configured] [2105/5314 modules rendered]
[2 ymakes processing] [8362/8362 modules configured] [5240/5314 modules rendered]
[2 ymakes processing] [8362/8362 modules configured] [5314/5314 modules rendered]
Configuring dependencies for platform test_tool_tc1-global
[0 ymakes processing] [8368/8368 modules configured] [5314/5314 modules rendered]
Configuring tests execution
Configuring local and dist store caches
Configuration done. Preparing for execution
|33.3%| CLEANING SYMRES | 2.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/common/libcore-kqp-common.a | 2.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build | 4.0%| PREPARE $(VCS) | 9.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/original/liblibs-linuxvdso-original.a | 9.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr3_cpp_runtime/libcontrib-libs-antlr3_cpp_runtime.a | 9.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain32/liblibs-base64-plain32.a | 9.7%| [AR] {BAZEL_DOWNLOAD} $(B)/certs/libcerts.a |10.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/bitseq/libcpp-containers-bitseq.a |10.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/2d_array/libcpp-containers-2d_array.a |10.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/atomizer/libcpp-containers-atomizer.a |10.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/compact_vector/libcpp-containers-compact_vector.a |10.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/absl_flat_hash/libcpp-containers-absl_flat_hash.a |10.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/expat/libcontrib-libs-expat.a |11.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/comptable/liblibrary-cpp-comptable.a |11.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/compproto/liblibrary-cpp-compproto.a |11.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/misc/libcpp-http-misc.a |11.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc-format/liblibs-apache-orc-format.a |11.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/hmac/libfq-libs-hmac.a |11.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/alloc_profiler/libcpp-lfalloc-alloc_profiler.a |11.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/events/libfq-libs-events.a |11.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/config/liblibrary-cpp-config.a |11.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/server/libcpp-http-server.a |12.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/metrics/libfq-libs-metrics.a |12.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipmath/liblibrary-cpp-ipmath.a |12.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/health/libfq-libs-health.a |12.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a |12.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/writer/libcpp-json-writer.a |12.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/gateway/libfq-libs-gateway.a |12.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/grpc/libfq-libs-grpc.a |12.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/graph_params/proto/liblibs-graph_params-proto.a |13.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/int128/liblibrary-cpp-int128.a |13.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/events/liblibs-control_plane_storage-events.a |13.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/events/libkqp-common-events.a |13.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/config_clusters/libyt-lib-config_clusters.a |13.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/yson_value/libpublic-lib-yson_value.a |14.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/libydb-library-db_pool.a |14.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/expr_traits/libyt-lib-expr_traits.a |15.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/parser/libcommon-schema-parser.a |15.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/provider/libproviders-pg-provider.a |15.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a |15.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/libessentials-parser-proto_ast.a |16.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/activation/libproviders-common-activation.a |16.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr3/libparser-proto_ast-antlr3.a |16.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/graph_reorder/libyt-lib-graph_reorder.a |16.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/hash/libyt-lib-hash.a |16.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/init_yt_api/libyt-lib-init_yt_api.a |17.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/hyperscan/libjsonpath-rewrapper-hyperscan.global.a |17.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/re2/libjsonpath-rewrapper-re2.global.a |17.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/libproviders-common-codec.a |17.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/libminikql-jsonpath-rewrapper.a |17.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/datetime/libessentials-minikql-datetime.a |17.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/interface/libcore-url_preprocessing-interface.a |17.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/skiff/libyt-lib-skiff.a |17.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/mkql_helpers/libyt-lib-mkql_helpers.a |17.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/libessentials-minikql-jsonpath.a |18.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/key_filter/libyt-lib-key_filter.a |18.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/lambda_builder/libyt-lib-lambda_builder.a |18.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/log/libyt-lib-log.a |18.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/minsketch/libessentials-core-minsketch.a |18.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/core/issue/protos/libcore-issue-protos.a |18.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/infer_schema/libyt-lib-infer_schema.a |18.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/download/libcore-file_storage-download.a |18.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/proto/libfile_storage-http_download-proto.a |19.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/libessentials-core-expr_nodes.a |19.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openssl/libcontrib-libs-openssl.a |19.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/libessentials-core-file_storage.a |19.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.a |19.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/transform/libcore-dq_integration-transform.a |20.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/request/libservices-metadata-request.a |20.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes_gen/libessentials-core-expr_nodes_gen.a |20.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |20.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.global.a |20.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/tablet/libydb-services-tablet.a |20.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/extract_predicate/libessentials-core-extract_predicate.a |20.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/libcore-file_storage-http_download.a |20.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/common/libservices-ext_index-common.a |20.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/defs/libcore-file_storage-defs.a |21.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/grpc/libapi-grpc-persqueue-deprecated.a |21.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/libsrc-client-types.a |21.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/persqueue/topic_parser_public/libsdk-library-persqueue-topic_parser_public-v3.a |21.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/fatal_error_handlers/libclient-types-fatal_error_handlers.a |22.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/status/libclient-types-status.a |22.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/exceptions/libclient-types-exceptions.a |21.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/query_stats/libclient-table-query_stats.a |21.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/common/libclient-topic-common.a |22.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.global.a |22.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/codecs/libclient-topic-codecs.a |22.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/facade/libessentials-core-facade.a |22.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.a |22.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/result/libsrc-client-result.a |23.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libcore-file_storage-proto.a |23.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.a |23.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/include/libclient-persqueue_public-include.a |23.2%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/thread_pool/libimpl-ydb_internal-thread_pool.a |23.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/params/libsrc-client-params.a |23.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/plain_status/libimpl-ydb_internal-plain_status.a |24.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/logger/libimpl-ydb_internal-logger.a |24.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/kqp_session_common/libimpl-ydb_internal-kqp_session_common.a |24.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/scheme/libsrc-client-scheme.a |24.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/rate_limiter/libsrc-client-rate_limiter.a |24.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/discovery/libsrc-client-discovery.a |24.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/export/libsrc-client-export.a |24.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libapi-protos-annotations.a |25.1%| PREPARE $(YMAKE_PYTHON3-4256832079) |25.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/out/libapi-protos-out.a |25.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/iam/libclient-yc_public-iam.a |25.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_log/libyql-utils-actor_log.a |25.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/serializations/libproviders-s3-serializations.a |25.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/deprecated/kicli/liblib-deprecated-kicli.a |25.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ymq/libydb-services-ymq.a |26.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/range_helpers/libproviders-s3-range_helpers.a |26.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/accessservice/libclient-nc_private-accessservice.a |26.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/object_listers/libproviders-s3-object_listers.a |26.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/compressors/libproviders-s3-compressors.a |26.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/interface/libproviders-dq-interface.a |26.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/libproviders-s3-proto.a |26.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/path_generator/libproviders-s3-path_generator.a |27.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/libconnector-api-service.a |27.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libapi-service-protos.a |27.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/mkql/libproviders-dq-mkql.a |27.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner/libproviders-dq-task_runner.a |28.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/common/libproviders-dq-common.a |28.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/udf_resolver/libcore-qplayer-udf_resolver.a |28.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/interface/libqplayer-storage-interface.a |27.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_settings/libessentials-core-pg_settings.a |28.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/libyql-dq-actors.a |28.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/libyql-dq-comp_nodes.a |28.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/dq/actors/input_transforms/libdq-actors-input_transforms.a |28.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_json/libydb-library-yaml_json.a |28.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/opt/libproviders-dq-opt.a |28.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/deprecated/read_batch_converter/libpersqueue-deprecated-read_batch_converter.a |28.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/counter_time_keeper/liblibrary-persqueue-counter_time_keeper.a |28.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/security/libydb-library-security.a |29.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/slide_limiter/service/liblibrary-slide_limiter-service.a |29.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schemu/liblibrary-schlab-schemu.a |29.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/datastreams/libsrc-client-datastreams.a |29.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/provider/libproviders-solomon-provider.a |29.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/liblibrary-login-protos.a |29.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/password_checker/liblibrary-login-password_checker.a |30.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/transformer/liblibrary-formats-arrow-transformer.a |30.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/logger/libydb-library-logger.a |29.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/converter/libarrow-csv-converter.a |29.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/hash/liblibrary-formats-arrow-hash.a |30.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/validation/liblibrary-formats-arrow-validation.a |30.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/mock/liblibrary-folder_service-mock.a |30.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/libydb-library-pdisk_io.a |30.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/liblibrary-folder_service-proto.a |30.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors/libproviders-s3-actors.a |30.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/libydb-library-folder_service.a |30.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/chunks_limiter/libydb-library-chunks_limiter.a |30.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libapi-grpc.a |31.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/DataStreams/liblibrary-arrow_clickhouse-DataStreams.a |31.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/libproviders-dq-actors.a |31.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_parquet/libydb-library-arrow_parquet.a |31.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Common/liblibrary-arrow_clickhouse-Common.a |31.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/common/libactors-testlib-common.a |31.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Columns/liblibrary-arrow_clickhouse-Columns.a |31.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_kernels/libydb-library-arrow_kernels.a |31.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/prof/liblibrary-actors-prof.a |32.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/common/libymq-queues-common.a |32.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/fifo/libymq-queues-fifo.a |32.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/accessor/libydb-library-accessor.a |32.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/peephole_opt/libessentials-core-peephole_opt.a |32.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/wrappers/ut_helpers/libcore-wrappers-ut_helpers.a |32.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/provider/libproviders-s3-provider.a |32.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/public/libtx-sequenceproxy-public.a |32.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/conclusion/libydb-library-conclusion.a |33.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/abstract/libtx-tiering-abstract.a |33.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/protos/liblibrary-db_pool-protos.a |33.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a |33.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/base/libcore-ymq-base.a |33.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a |33.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/ttl/libschemeshard-olap-ttl.a |33.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a |33.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a |33.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/libcore-tx-sequenceproxy.a |34.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a |34.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ydb_convert/libydb-core-ydb_convert.a |34.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/protos/libolap-bg_tasks-protos.a |34.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/libydb-core-wrappers.a |34.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a |34.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/common/libtx-replication-common.a |34.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/adapter/libolap-bg_tasks-adapter.a |34.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/public/libtx-long_tx_service-public.a |34.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a |34.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/usage/libtx-conveyor-usage.a |35.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/events/libsubscriber-abstract-events.a |35.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/program/libcore-tx-program.a |35.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/libydb-library-arrow_clickhouse.a |35.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |35.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |35.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/libtx-columnshard-splitter.a |35.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/simple_builder/liblibrary-formats-arrow-simple_builder.a |35.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tablet/libcolumnshard-normalizer-tablet.global.a |35.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/s3/libsession-storage-s3.global.a |35.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/modifier/liblibrary-formats-arrow-modifier.a |35.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a |36.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a |36.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.a |36.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/liblibrary-formats-arrow-protos.a |36.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a |36.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.a |36.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/liblibrary-formats-arrow.a |36.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/libyql-essentials-core.a |36.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.global.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/libstorage-indexes-count_min_sketch.global.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.global.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/scalar/liblibrary-formats-arrow-scalar.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/common/libstorage-actualizer-common.a |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/libstorage-actualizer-abstract.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/column/libengines-scheme-column.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/common/libscheme-defaults-common.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/switch/liblibrary-formats-arrow-switch.a |37.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/splitter/liblibrary-formats-arrow-splitter.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.global.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/libreader-simple_reader-duplicates.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/libsimple_reader-iterator-sync_points.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/predicate/libcolumnshard-engines-predicate.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/dictionary/libchanges-compaction-dictionary.global.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/status/libdata_sharing-initiator-status.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a |38.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |38.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/context/libdata_sharing-common-context.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/protos/libcolumnshard-blobs_action-protos.a |39.1%| PREPARE $(LLD_ROOT-3808007503) |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/local/libcolumnshard-blobs_action-local.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |39.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/kqp_yql.h_serialized.cpp |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/libreader-simple_reader-iterator.a |39.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/session/libcolumnshard-bg_tasks-session.a |39.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_timeouts.cpp |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/balance_coverage/libcore-tx-balance_coverage.a |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/transactions/libcolumnshard-bg_tasks-transactions.a |39.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_yql.cpp |39.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/protos/libcore-tablet_flat-protos.a |39.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/transfer/libydb-core-transfer.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/libydb-library-login.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/authorization/liblibrary-http_proxy-authorization.a |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/actors/libgrpc-server-actors.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/global_plugins/libydb-library-global_plugins.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/fyamlcpp/libydb-library-fyamlcpp.a |40.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.global.a |40.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libcore-protos-schemeshard.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/liblibrary-grpc-server.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/config/libcore-persqueue-config.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/topics/libcore-kqp-topics.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_proto_split/libproto_ast-gen-v1_proto_split.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/common/libkqp-workload_service-common.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/account_lockout/liblibrary-login-account_lockout.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/error/liblibrary-http_proxy-error.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a |42.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/local_pgwire/libydb-core-local_pgwire.a |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/invoke_builtins/llvm16/libminikql-invoke_builtins-llvm16.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_data/libcore-kqp-query_data.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/expr_nodes/libcore-kqp-expr_nodes.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/test_tablet/libydb-core-test_tablet.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/jaeger_tracing/libydb-core-jaeger_tracing.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/aggregator/libcore-statistics-aggregator.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/naming_conventions/libydb-library-naming_conventions.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/protos/liblibrary-pdisk_io-protos.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/libydb-library-mkql_proto.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/liblibrary-mkql_proto-protos.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/proto/liblibs-quota_manager-proto.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ncloud/impl/liblibrary-ncloud-impl.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/read_rule/libfq-libs-read_rule.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/splitter/libformats-arrow-splitter.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/common/libfq-libs-common.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.global.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/save_load/libformats-arrow-save_load.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/libfq-libs-quota_manager.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet/libydb-core-tablet.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/events/liblibs-control_plane_config-events.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.global.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libapi-protos.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.global.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.a |44.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/formats/arrow/common/libformats-arrow-common.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/version/libversion.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/libydb-core-control.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/libydb-core-external_sources.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/libcore-formats-arrow.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/load_test/libydb-core-load_test.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/libydb-core-mind.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/libydb-core-engine.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/libcore-base-generated.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/common/libcore-backup-common.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/impl/libcore-backup-impl.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/controller/libcore-backup-controller.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/audit/libydb-core-audit.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/tools/enum_parser/enum_serialization_runtime/libtools-enum_parser-enum_serialization_runtime.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/plain_text_formatter/libyt-logging-plain_text_formatter.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/threading/libcpp-yt-threading.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/assert/libcpp-yt-assert.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/util/charset/libutil-charset.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/json/libcpp-yson-json.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/document/libcpp-xml-document.a |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/init/libcpp-xml-init.a |46.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/libcontrib-libs-tcmalloc.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/libydb-core-tablet_flat.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/queue/libcpp-threading-queue.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/skip_list/libcpp-threading-skip_list.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/proto/libcpp-unified_agent_client-proto.a |46.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.global.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.global.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest/libcpp-testing-unittest.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zc_memory_input/libcpp-streams-zc_memory_input.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/libydb-core-base.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/interop/libcpp-protobuf-interop.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson_pull/libyson_pull.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/hyperscan/libcpp-regex-hyperscan.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/protos/libcpp-retry-protos.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/holders/libcpp-openssl-holders.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/io/libcpp-openssl-io.a |47.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/liblibrary-cpp-yson.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/proto/libprotobuf-json-proto.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/node/libcpp-yson-node.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/proto/libprotobuf-util-proto.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/big_integer/libcpp-openssl-big_integer.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/tablesorter/libservice-pages-tablesorter.global.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/crypto/libcpp-openssl-crypto.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/on_disk/chunks/libcpp-on_disk-chunks.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/init/libcpp-openssl-init.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/exception/libcpp-monlib-exception.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/messagebus/libcpp-monlib-messagebus.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/text/libmonlib-encode-text.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/legacy_protobuf/protos/libencode-legacy_protobuf-protos.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/libcpp-yt-backtrace.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/libcpp-yt-logging.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/json/libmonlib-encode-json.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/cursors/libunwind/libbacktrace-cursors-libunwind.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/error/libcpp-yt-error.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/config/libcpp-messagebus-config.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/libcpp-lwtrace-protos.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lua/liblibrary-cpp-lua.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/libcpp-digest-argonish.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dot_product/liblibrary-cpp-dot_product.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/cpu_clock/libcpp-yt-cpu_clock.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/liblibrary-cpp-lwtrace.a |49.3%| [AR] 
{BAZEL_DOWNLOAD} $(B)/library/cpp/digest/crc32c/libcpp-digest-crc32c.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/paged_vector/libcpp-containers-paged_vector.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cache/liblibrary-cpp-cache.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/case_insensitive_string/liblibrary-cpp-case_insensitive_string.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/diff/liblibrary-cpp-diff.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/comptrie/libcpp-containers-comptrie.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cgiparam/liblibrary-cpp-cgiparam.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/liblibrary-cpp-charset.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/liblibrary-cpp-blockcodecs.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zstd/libblockcodecs-codecs-zstd.global.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zlib/libblockcodecs-codecs-zlib.global.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/core/libcpp-blockcodecs-core.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/global/libcpp-yt-global.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/avx2/libinternal-proxies-avx2.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/greedy_dict/libcpp-codecs-greedy_dict.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/bzip/libblockcodecs-codecs-bzip.global.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/exception/libcpp-yt-exception.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/cityhash-1.0.2/libcontrib-restricted-cityhash-1.0.2.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/lite/libcpp-charset-lite.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/impl_common/libboost-context-impl_common.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/coroutine/librestricted-boost-coroutine.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/regex/librestricted-boost-regex.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/chrono/librestricted-boost-chrono.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/exception/librestricted-boost-exception.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/profiling/libabseil-cpp-absl-profiling.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/xxhash/libcontrib-libs-xxhash.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-mqtt/librestricted-aws-aws-c-mqtt.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/program_options/librestricted-boost-program_options.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/system/libcpp-yt-system.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/numeric/libabseil-cpp-tstring-y_absl-numeric.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/libcore-cms-console.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/misc/libcpp-yt-misc.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/graph/librestricted-boost-graph.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/malloc/libcpp-yt-malloc.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/malloc_extension/liblibs-tcmalloc-malloc_extension.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ytalloc/api/libcpp-ytalloc-api.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/string/libcpp-yt-string.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/memory/libcpp-yt-memory.a |51.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/tcmalloc/no_percpu_cache/liblibs-tcmalloc-no_percpu_cache.global.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Crypto/liblibs-poco-Crypto.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson/libcpp-yt-yson.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson_string/libcpp-yt-yson_string.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lz4/libcontrib-libs-lz4.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzma/libcontrib-libs-lzma.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/re2/libcontrib-libs-re2.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libcontrib-libs-openldap.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp2/libcontrib-libs-nghttp2.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lua/libcontrib-libs-lua.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/TargetInfo/libTarget-X86-TargetInfo.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/libcontrib-libs-linuxvdso.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libidn/static/liblibs-libidn-static.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ProfileData/libllvm16-lib-ProfileData.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_thread/liblibs-libevent-event_thread.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/MCParser/liblib-MC-MCParser.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libiconv/static/liblibs-libiconv-static.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitcode/Reader/liblib-Bitcode-Reader.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libaio/static/liblibs-libaio-static.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/AsmParser/libllvm16-lib-AsmParser.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_core/liblibs-libevent-event_core.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libbz2/libcontrib-libs-libbz2.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hdr_histogram/libcontrib-libs-hdr_histogram.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Object/libllvm16-lib-Object.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/util/draft/libutil-draft.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/address_sorting/libgrpc-third_party-address_sorting.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/libllvm16-lib-MC.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxrt/liblibs-cxxsupp-libcxxrt.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/double-conversion/libcontrib-libs-double-conversion.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libxml/libcontrib-libs-libxml.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxabi-parts/liblibs-cxxsupp-libcxxabi-parts.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/version/libversion_definition.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/crcutil/libcontrib-libs-crcutil.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon64/liblibs-base64-neon64.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/builtins/liblibs-cxxsupp-builtins.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/avx2/liblibs-base64-avx2.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse41/libfarmhash-arch-sse41.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon32/liblibs-base64-neon32.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/libcontrib-libs-cctz.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/tzdata/liblibs-cctz-tzdata.global.a |52.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/base64/ssse3/liblibs-base64-ssse3.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain64/liblibs-base64-plain64.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/certs/libcerts.global.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Passes/libllvm16-lib-Passes.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42/libfarmhash-arch-sse42.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/dec/liblibs-brotli-dec.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/c-ares/libcontrib-libs-c-ares.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/common/liblibs-brotli-common.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/libcontrib-libs-farmhash.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/backtrace/libcontrib-libs-backtrace.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fastlz/libcontrib-libs-fastlz.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fmt/libcontrib-libs-fmt.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Support/libllvm16-lib-Support.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/upb/libgrpc-third_party-upb.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/util/libyutil.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/enc/liblibs-brotli-enc.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/curl/libcontrib-libs-curl.a |52.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/actors/core/ut/ydb-library-actors-core-ut |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxx/liblibs-cxxsupp-libcxx.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr4_cpp_runtime/libcontrib-libs-antlr4_cpp_runtime.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/avro/liblibs-apache-avro.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_core2/liblibs-hyperscan-runtime_core2.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc/liblibs-apache-orc.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Utils/liblib-Transforms-Utils.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libc_compat/libcontrib-libs-libc_compat.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_avx2/liblibs-hyperscan-runtime_avx2.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_openssl/liblibs-libevent-event_openssl.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_extra/liblibs-libevent-event_extra.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-core/liblibs-aws-sdk-cpp-aws-cpp-sdk-core.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/libcore-blobstorage-crypto.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/groupinfo/libcore-blobstorage-groupinfo.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/common/libcore-blobstorage-common.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_corei7/liblibs-hyperscan-runtime_corei7.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/chacha_512/libblobstorage-crypto-chacha_512.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/other/libcore-blobstorage-other.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/lwtrace_probes/libcore-blobstorage-lwtrace_probes.a |52.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/libcore-blobstorage-vdisk.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/libydb-library-schlab.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/mock/libblobstorage-pdisk-mock.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/ingress/libblobstorage-vdisk-ingress.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/protos/libblobstorage-vdisk-protos.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pretty_types_print/protobuf/liblibrary-pretty_types_print-protobuf.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/protos/liblibrary-schlab-protos.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/libcontrib-libs-googleapis-common-protos.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blockstore/core/libcore-blockstore-core.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/metadata/libcore-client-metadata.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/protobuf_printer/libydb-library-protobuf_printer.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/change_exchange/libydb-core-change_exchange.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/query_actor/libydb-library-query_actor.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libunwind/libcontrib-libs-libunwind.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libfyaml/libcontrib-libs-libfyaml.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/liburing/libcontrib-libs-liburing.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-s3/liblibs-aws-sdk-cpp-aws-cpp-sdk-s3.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.global.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/probes/liblibrary-schlab-probes.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/util/libcms-console-util.a |52.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/BinaryFormat/libllvm16-lib-BinaryFormat.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/validation/libcore-config-validation.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libcore-config-protos.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/debug/libydb-core-debug.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/base_utils/libbase_utils.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/discovery/libydb-core-discovery.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/sql_types/libessentials-core-sql_types.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/init/libcore-config-init.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schine/liblibrary-schlab-schine.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/docapi/libydb-core-docapi.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schoot/liblibrary-schlab-schoot.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/validators/libcms-console-validators.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libydb-library-services.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/signal_backtrace/libydb-library-signal_backtrace.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/user_data/libessentials-core-user_data.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/libcore-external_sources-object_storage.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite/liblibrary-formats-arrow-accessor-composite.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/libessentials-core-services.a |52.6%| PREPARE $(PYTHON) |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite_serial/libarrow-accessor-composite_serial.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/filestore/core/libcore-filestore-core.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/inference/libexternal_sources-object_storage-inference.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/dictionary/libarrow-accessor-dictionary.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/interface/libcore-url_lister-interface.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/erasure/libydb-core-erasure.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/slide_limiter/usage/liblibrary-slide_limiter-usage.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.global.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/abstract/liblibrary-workload-abstract.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/abstract/libarrow-accessor-abstract.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/signals/libydb-library-signals.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_base/libcli_base.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.global.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/dictionary/libarrow-accessor-dictionary.global.a |52.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Analysis/libllvm16-lib-Analysis.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/schema/libyt-lib-schema.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/res_pull/libyt-lib-res_pull.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/row_spec/libyt-lib-row_spec.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/arrow/libessentials-minikql-arrow.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.global.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/table_creator/libydb-library-table_creator.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/hash/libformats-arrow-hash.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/run/librun.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.global.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/common/liblibrary-formats-arrow-accessor-common.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/proto/libjsonpath-rewrapper-proto.a |52.9%| [CF] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/build_info.cpp |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/rows/libformats-arrow-rows.a |53.0%| [CF] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/sandbox.cpp |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/type_ann/libessentials-core-type_ann.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/reader/libformats-arrow-reader.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.global.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/codegen/llvm16/libminikql-codegen-llvm16.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/switch/libformats-arrow-switch.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/dictionary/libformats-arrow-dictionary.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/transformer/libformats-arrow-transformer.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/server/libcore-client-server.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/libydb-core-formats.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/protos/libyaml-config-protos.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/events/liblibs-audit-events.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/libydb-library-yaml_config.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/libfq-libs-audit.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/public/liblibrary-yaml_config-public.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/events/liblibs-checkpoint_storage-events.a |53.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/parser/pg_catalog/proto/libparser-pg_catalog-proto.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/proto/liblibs-checkpoint_storage-proto.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.global.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing_common/libfq-libs-checkpointing_common.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.global.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/liblibrary-ydb_issue-proto.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/lexer_common/libessentials-parser-lexer_common.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/llvm16/libminikql-computation-llvm16.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/common/libdq-actors-common.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing/libfq-libs-checkpointing.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/dom/libessentials-minikql-dom.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/cloud_audit/libfq-libs-cloud_audit.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/liblibs-compute-common.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/synchronization_service/libcompute-ydb-synchronization_service.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/libfq-libs-checkpoint_storage.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/common/libyql-dq-common.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/control_plane/libcompute-ydb-control_plane.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ycloud/impl/liblibrary-ycloud-impl.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/libfq-libs-config.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/task_runner/libdq-actors-task_runner.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/libcontrib-libs-hyperscan.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/events/liblibs-control_plane_proxy-events.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/simple/libcpp-http-simple.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yson_helpers/libyt-lib-yson_helpers.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/libyql-essentials-minikql.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/url_mapper/libyt-lib-url_mapper.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/interface/libparser-pg_wrapper-interface.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/libfq-libs-control_plane_config.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/events/liblibs-rate_limiter-events.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/liblibs-compute-ydb.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/utils/liblibs-rate_limiter-utils.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/control_plane_service/liblibs-rate_limiter-control_plane_service.a |53.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/rate_limiter/quoter_service/liblibs-rate_limiter-quoter_service.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/liblibs-config-protos.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/events/liblibs-row_dispatcher-events.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/compute/libdq-actors-compute.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/result_formatter/libfq-libs-result_formatter.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/spilling/libdq-actors-spilling.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libdq-actors-protos.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr4/libparser-proto_ast-antlr4.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/common/librow_dispatcher-format_handler-common.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libyql-dq-proto.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/liblibs-row_dispatcher-format_handler.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/state/libyql-dq-state.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/transform/libyql-dq-transform.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/filters/librow_dispatcher-format_handler-filters.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/parsers/librow_dispatcher-format_handler-parsers.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/protos/liblibs-row_dispatcher-protos.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/tasks/libyql-dq-tasks.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/interface/liblibs-shared_resources-interface.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_compilation/liblibs-row_dispatcher-purecalc_compilation.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/proto/libproviders-clickhouse-proto.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_download/libyt-lib-yt_download.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/libfq-libs-shared_resources.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/signer/libfq-libs-signer.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/libessentials-minikql-computation.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/jsonpath/libproto_ast-gen-jsonpath.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/tasks_packer/libfq-libs-tasks_packer.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/events/liblibs-test_connection-events.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/type_ann/libyql-dq-type_ann.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/interface/libcommon-arrow-interface.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/expr_nodes/libproviders-clickhouse-expr_nodes.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_no_pg_wrapper/liblibs-row_dispatcher-purecalc_no_pg_wrapper.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/libcontrib-libs-grpc.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/service/libcore-graph-service.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/ydb/libfq-libs-ydb.a |53.7%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/graph/protos/libcore-graph-protos.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0/libproto_ast-gen-v0.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/libfq-libs-test_connection.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/libproviders-common-arrow.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/protos/libgraph-shard-protos.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/benchmark_base/liblibrary-workload-benchmark_base.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/db_id_async_resolver/libproviders-common-db_id_async_resolver.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_caching/libydb-core-grpc_caching.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/runtime/libyql-dq-runtime.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/libcore-grpc_services-cancelation.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/protos/libgrpc_services-cancelation-protos.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/http_gateway/libproviders-common-http_gateway.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/pushdown/libproviders-common-pushdown.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitstream/Reader/liblib-Bitstream-Reader.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/proto/libproviders-yt-proto.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/provider/libproviders-clickhouse-provider.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/libydb-core-grpc_streaming.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/events/libdq-actors-events.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/libcore-graph-shard.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/client/libcommon-token_accessor-client.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitcode/Writer/liblib-Bitcode-Writer.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/opt/libproviders-yt-opt.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/Orc/Shared/libExecutionEngine-Orc-Shared.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/private_client/libfq-libs-private_client.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/MSF/liblib-DebugInfo-MSF.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/grpc/libcommon-token_accessor-grpc.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/grpc/libdq-api-grpc.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/cell_maker/libcore-io_formats-cell_maker.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/arrow/scheme/libio_formats-arrow-scheme.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/MCJIT/liblib-ExecutionEngine-MCJIT.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/PerfJITEvents/liblib-ExecutionEngine-PerfJITEvents.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/health_check/libydb-core-health_check.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/Symbolize/liblib-DebugInfo-Symbolize.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/Orc/TargetProcess/libExecutionEngine-Orc-TargetProcess.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Demangle/libllvm16-lib-Demangle.a |53.8%| 
[AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/icu/libcontrib-libs-icu.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IRReader/libllvm16-lib-IRReader.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/ydb_dump/libcore-io_formats-ydb_dump.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IRPrinter/libllvm16-lib-IRPrinter.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/MCDisassembler/liblib-MC-MCDisassembler.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/DWARF/liblib-DebugInfo-DWARF.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Linker/libllvm16-lib-Linker.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Frontend/OpenMP/liblib-Frontend-OpenMP.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/GlobalISel/liblib-CodeGen-GlobalISel.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/RuntimeDyld/liblib-ExecutionEngine-RuntimeDyld.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/arrow/libcommon-codec-arrow.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Remarks/libllvm16-lib-Remarks.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/PDB/liblib-DebugInfo-PDB.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/AsmParser/libTarget-X86-AsmParser.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/libllvm16-lib-Target.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/config/libproviders-dq-config.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/Disassembler/libTarget-X86-Disassembler.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/CodeView/liblib-DebugInfo-CodeView.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kesus/proxy/libcore-kesus-proxy.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TargetParser/libllvm16-lib-TargetParser.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/buffer/libkqp-common-buffer.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/batch/libkqp-common-batch.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libyql-essentials-protos.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/counters/libproviders-dq-counters.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_ansi/libproto_ast-gen-v1_ansi.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0_proto_split/libproto_ast-gen-v0_proto_split.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/CFGuard/liblib-Transforms-CFGuard.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/protos/libcore-keyvalue-protos.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/AggressiveInstCombine/liblib-Transforms-AggressiveInstCombine.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1/libproto_ast-gen-v1.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/simple/libkqp-common-simple.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/expr_nodes/libproviders-ytflow-expr_nodes.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/protos/libdq-api-protos.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/arrow_resolve/libproviders-common-arrow_resolve.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/MCTargetDesc/libTarget-X86-MCTargetDesc.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/shutdown/libkqp-common-shutdown.a 
|54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/compilation/libkqp-common-compilation.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Coroutines/liblib-Transforms-Coroutines.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_ansi_antlr4/libproto_ast-gen-v1_ansi_antlr4.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/helper/libproviders-dq-helper.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/libydb-core-keyvalue.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bucket_quoter/liblibrary-cpp-bucket_quoter.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/shards_resolver/libkqp-executer_actor-shards_resolver.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/dq/libproviders-common-dq.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/planner/libproviders-dq-planner.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/config/libproviders-common-config.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/comp_nodes/libproviders-common-comp_nodes.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/opt/libyql-dq-opt.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/runtime/libproviders-dq-runtime.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/exec/libdq-provider-exec.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/interface/libdq-worker_manager-interface.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.global.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner_actor/libproviders-dq-task_runner_actor.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/execprofile/liblibrary-cpp-execprofile.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/SelectionDAG/liblib-CodeGen-SelectionDAG.a |54.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/workload/benchmark_base/ut/ydb-library-workload-benchmark_base-ut |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/ObjCARC/liblib-Transforms-ObjCARC.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/gateway/libproviders-common-gateway.a |54.3%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/events/liblibs-quota_manager-events.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/interface/libytflow-integration-interface.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzmasdk/libcontrib-libs-lzmasdk.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/libproviders-dq-provider.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/libproviders-dq-worker_manager.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nayuki_md5/libcontrib-libs-nayuki_md5.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_antlr4/libproto_ast-gen-v1_antlr4.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp3/libcontrib-libs-nghttp3.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/expr_nodes/libproviders-generic-expr_nodes.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libraries/liblber/libopenldap-libraries-liblber.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/libfq-libs-protos.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IR/libllvm16-lib-IR.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/actors/libproviders-generic-actors.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/libydb-core-kqp.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.global.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/libgeneric-connector-libcpp.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/libydb-core-kqp.global.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/libcontrib-libs-pcre.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre32/liblibs-pcre-pcre32.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre16/liblibs-pcre-pcre16.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/proto/libytflow-integration-proto.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/proto/libproviders-generic-proto.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/libproviders-common-metrics.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/libcontrib-libs-opentelemetry-proto.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/JSON/liblibs-poco-JSON.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/NetSSL_OpenSSL/liblibs-poco-NetSSL_OpenSSL.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/InstCombine/liblib-Transforms-InstCombine.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.global.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Instrumentation/liblib-Transforms-Instrumentation.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |54.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/poco/Util/liblibs-poco-Util.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/XML/liblibs-poco-XML.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/snappy/libcontrib-libs-snappy.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sasl/libcontrib-libs-sasl.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/utf8proc/libcontrib-libs-utf8proc.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/simdjson/libcontrib-libs-simdjson.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/protos/libcommon-metrics-protos.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Vectorize/liblib-Transforms-Vectorize.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/pushdown/libproviders-generic-pushdown.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/IPO/liblib-Transforms-IPO.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/liblib-Target-X86.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yajl/libcontrib-libs-yajl.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/proto/libkqp-proxy_service-proto.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml/libcontrib-libs-yaml.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zlib/libcontrib-libs-zlib.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml-cpp/libcontrib-libs-yaml-cpp.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/container/libabseil-cpp-tstring-y_absl-container.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/base/libabseil-cpp-tstring-y_absl-base.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Foundation/liblibs-poco-Foundation.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/host/libcore-kqp-host.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/debugging/libabseil-cpp-tstring-y_absl-debugging.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Net/liblibs-poco-Net.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/flags/libabseil-cpp-tstring-y_absl-flags.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/random/libabseil-cpp-tstring-y_absl-random.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/hash/libabseil-cpp-tstring-y_absl-hash.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/profiling/libabseil-cpp-tstring-y_absl-profiling.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd06/libcontrib-libs-zstd06.a |54.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut/ydb-core-base-ut |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/log/libabseil-cpp-tstring-y_absl-log.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/status/libabseil-cpp-tstring-y_absl-status.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/libabseil-cpp-tstring-y_absl-synchronization.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd/libcontrib-libs-zstd.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/types/libabseil-cpp-tstring-y_absl-types.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/time/libabseil-cpp-tstring-y_absl-time.a |54.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/libabseil-cpp-tstring-y_absl-strings.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/arrow/fbs/libclient-arrow-fbs.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/base/libabseil-cpp-absl-base.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/build/libyt-yt-build.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/provider/libproviders-generic-provider.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/common/libproviders-pq-common.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/container/libabseil-cpp-absl-container.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/cm_client/libproviders-pq-cm_client.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/expr_nodes/libproviders-pq-expr_nodes.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/debugging/libabseil-cpp-absl-debugging.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql/libproviders-common-mkql.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/log_backend/libydb-core-log_backend.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/metering/libydb-core-metering.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/async_io/libproviders-pq-async_io.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/memory_controller/libydb-core-memory_controller.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/hash/libabseil-cpp-absl-hash.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libproviders-common-proto.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/address_classification/libcore-mind-address_classification.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/flags/libabseil-cpp-absl-flags.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/random/libabseil-cpp-absl-random.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/log/libabseil-cpp-absl-log.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/synchronization/libabseil-cpp-absl-synchronization.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/libproviders-common-schema.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/native/libpq-gateway-native.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/status/libabseil-cpp-absl-status.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/proto/libproviders-pq-proto.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-cal/librestricted-aws-aws-c-cal.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-auth/librestricted-aws-aws-c-auth.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/types/libabseil-cpp-absl-types.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-compression/librestricted-aws-aws-c-compression.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-event-stream/librestricted-aws-aws-c-event-stream.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/time/libabseil-cpp-absl-time.a |54.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/persqueue/codecs/libcore-persqueue-codecs.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-checksums/librestricted-aws-aws-checksums.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon/libydb-core-mon.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-common/librestricted-aws-aws-c-common.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-io/librestricted-aws-aws-c-io.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-s3/librestricted-aws-aws-c-s3.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/expr/libcommon-schema-expr.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/events/libcore-persqueue-events.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-http/librestricted-aws-aws-c-http.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/provider/libproviders-common-provider.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/task_meta/libproviders-pq-task_meta.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/atomic/librestricted-boost-atomic.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/partition_key_range/libcore-persqueue-partition_key_range.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/container/librestricted-boost-container.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/iostreams/librestricted-boost-iostreams.a |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/pgproxy/pg_proxy_ut.cpp |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-crt-cpp/librestricted-aws-aws-crt-cpp.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/random/librestricted-boost-random.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/logical/libkqp-opt-logical.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/thread/librestricted-boost-thread.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/dragonbox/libdragonbox.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Scalar/liblib-Transforms-Scalar.a |55.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/opt/ut/ydb-library-yql-dq-opt-ut |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/s2n/librestricted-aws-s2n.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googlemock/librestricted-googletest-googlemock.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/serialization/librestricted-boost-serialization.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/http-parser/libcontrib-restricted-http-parser.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/locale/librestricted-boost-locale.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/mkql/libcommon-schema-mkql.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors_factory/libproviders-s3-actors_factory.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/uriparser/libcontrib-restricted-uriparser.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/purecalc/libcore-persqueue-purecalc.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googletest/librestricted-googletest-googletest.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/common/libproviders-s3-common.a |55.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/providers/pq/provider/libproviders-pq-provider.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/arrow/libyt-client-arrow.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bit_io/liblibrary-cpp-bit_io.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/archive/liblibrary-cpp-archive.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/provider/libcore-kqp-provider.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/binsaver/liblibrary-cpp-binsaver.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/brotli/libblockcodecs-codecs-brotli.global.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/fastlz/libblockcodecs-codecs-fastlz.global.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/libydb-core-pgproxy.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/legacy_zstd06/libblockcodecs-codecs-legacy_zstd06.global.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lz4/libblockcodecs-codecs-lz4.global.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lzma/libblockcodecs-codecs-lzma.global.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/snappy/libblockcodecs-codecs-snappy.global.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/skiff/libcommon-schema-skiff.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/credentials/libproviders-s3-credentials.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/protos/libcore-pgproxy-protos.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/common/libpersqueue-ut-common.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/protos/libcore-public_http-protos.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/colorizer/liblibrary-cpp-colorizer.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/out/libcore-protos-out.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/disjoint_interval_tree/libcpp-containers-disjoint_interval_tree.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/liblibrary-cpp-codecs.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/public/libcore-quoter-public.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_avl_tree/libcpp-containers-intrusive_avl_tree.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/structured_token/libproviders-common-structured_token.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/transform/libproviders-common-transform.a |53.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/client/libyt-yt-client.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/formats/libyt-client-formats.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/ring_buffer/libcpp-containers-ring_buffer.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/sorted_vector/libcpp-containers-sorted_vector.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_array/libcpp-containers-stack_array.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_vector/libcpp-containers-stack_vector.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/str_map/libcpp-containers-str_map.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dbg_output/liblibrary-cpp-dbg_output.a |53.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/deprecated/enum_codegen/libcpp-deprecated-enum_codegen.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/engine/libcpp-coroutine-engine.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/listener/libcpp-coroutine-listener.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/libydb-core-quoter.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/events/libproviders-s3-events.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/resource_pools/libydb-core-resource_pools.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/stub/libudf-service-stub.global.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/raw_socket/libydb-core-raw_socket.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/kmp/libcpp-deprecated-kmp.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/accessors/libcpp-deprecated-accessors.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/split/libcpp-deprecated-split.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/statistics/libproviders-s3-statistics.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/expr_nodes/libproviders-s3-expr_nodes.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg/libessentials-sql-pg.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/libydb-core-scheme.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libcore-scheme-protos.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/expr_nodes/libproviders-pg-expr_nodes.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme_types/libydb-core-scheme_types.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/md5/libcpp-digest-md5.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/old_crc/libcpp-digest-old_crc.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/murmur/libcpp-digest-murmur.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dwarf_backtrace/liblibrary-cpp-dwarf_backtrace.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/disjoint_sets/liblibrary-cpp-disjoint_sets.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/udf_resolve/libproviders-common-udf_resolve.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse2/libinternal-proxies-sse2.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dns/liblibrary-cpp-dns.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse41/libinternal-proxies-sse41.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/enumbitset/liblibrary-cpp-enumbitset.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/global/libcpp-logger-global.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/common/libproviders-solomon-common.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/actors/libproviders-solomon-actors.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/api/libcpp-malloc-api.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/analytics/liblwtrace-mon-analytics.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/database/libcore-statistics-database.a |55.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/logger/liblibrary-cpp-logger.global.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/libydb-core-security.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ssse3/libinternal-proxies-ssse3.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.global.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/tcmalloc/libcpp-malloc-tcmalloc.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base32/libcpp-string_utils-base32.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/hive/libcore-mind-hive.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/actor/libmessagebus_actor.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/events/libproviders-solomon-events.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/config/libessentials-providers-config.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_common.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/options.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_client.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_writer.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_type_compatibility.cpp |55.3%| [CP] {default-linux-x86_64, relwithdebinfo} $(B)/common_test.context |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/rich.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/name_table.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/liblibrary-cpp-messagebus.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_output.cpp |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/libydb-core-persqueue.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/logical_type.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/public.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/helpers.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/common.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/security_client.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/wire_row_stream.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/etc_client.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/skynet.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/journal_client.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/helpers.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/shuffle_client.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/query_tracker_client.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/acl.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_reader.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/public.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/internal_client.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_transaction.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_cache.cpp |55.4%| 
[CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rowset.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/queue_transaction_mixin.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/dynamic_table_transaction_mixin.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/address_helpers.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_client.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection_impl.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_session.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_writer.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/public.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_reader.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_mount_cache.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/config.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/operation_client.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/config.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/persistent_queue.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_writer.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_stream.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/helpers.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/timestamp_provider.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_reader.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/public.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_writer.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/config.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_reader.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction_impl.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/read_limit.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_client.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_client.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/helpers.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/sticky_transaction_pool.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/chunk_replica.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/transaction.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/public.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/cypress_client/public.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/election/public.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/config.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/helpers.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_cache.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/data_statistics.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_base.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/infinite_entity.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_yson_token.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/complex_types/merge_complex_types.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/ready_event_reader_base.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/public.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/time_text.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/uuid_text.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/helpers.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hive/timestamp_map.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_serialization.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/file_client/config.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_settings.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/yson_format_conversion.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/config.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hydra/version.cpp |55.8%| PREPARE $(CLANG_FORMAT-2212207123) |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_statistics.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/method_helpers.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/protocol.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/requests.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/packet.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/workload.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/io_tags.cpp |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/libessentials-parser-pg_wrapper.a |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/config.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/public.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_builder.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/helpers.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/public.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/access_control.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/helpers.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/queue_rowset.cpp |56.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/pgproxy/ut/ydb-core-pgproxy-ut |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/node_directory.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/helpers.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/config.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/spec_patch.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/consumer_client.cpp |55.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_id_or_alias.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/partition_reader.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar_statistics.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/producer_client.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/chunk_stripe_statistics.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/validator.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/check_schema_compatibility.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/signature.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_cache.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/generator.cpp |56.0%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/adapters.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_rename_descriptor.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_sort_schema.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/composite_compare.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/comparator.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound_compressor.cpp |55.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/blob_reader.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_codegen_cpp.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/merge_table_schemas.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/public.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_consumer.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_batch.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/helpers.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_base.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/serialize.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_row_reorderer.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_helpers.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/pipe.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_buffer.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/config.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema_serialization_helpers.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/watermark_runtime_data.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_io_options.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_reader.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_dynamic_table_writer.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/timestamped_schema_helpers.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_buffered_dynamic_table_writer.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unordered_schemaful_reader.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_upload_options.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/value_consumer.cpp |56.3%| PREPARE $(FLAKE8_LINTER-sbr:6561765464) |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_value.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_row.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/validate_logical_type.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/helpers.cpp |56.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut_large/ydb-core-tablet_flat-ut_large |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/public.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_row.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_impl.cpp |56.2%| 
[CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/wire_protocol.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/noop_timestamp_provider.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/parser_detail.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/timestamp_provider_base.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache_detail.cpp |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/protobuf/libmessagebus_protobuf.a |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/helpers.cpp |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/monitoring/libcpp-messagebus-monitoring.a |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/config.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/batching_timestamp_provider.cpp |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/scheduler/libcpp-messagebus-scheduler.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/oldmodule/libcpp-messagebus-oldmodule.a |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/remote_timestamp_provider.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/config.cpp |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.global.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/expr_nodes/libproviders-result-expr_nodes.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/libydb-core-sys_view.a |56.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/ut/ydb-core-blobstorage-vdisk-anubis_osiris-ut |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/expr_nodes/libproviders-solomon-expr_nodes.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/gateway/libproviders-solomon-gateway.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/nodes/libcore-sys_view-nodes.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/cluster_ordering/libservices-persqueue_cluster_discovery-cluster_ordering.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/service/libtx-tracing-service.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/monitoring/libydb-services-monitoring.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/common/libcore-sys_view-common.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/mime/types/libcpp-mime-types.a |56.5%| [ld] {default-linux-x86_64, relwithdebinfo} $(B)/tools/flake8_linter/flake8_linter |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/deprecated/json/libmonlib-deprecated-json.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/libcpp-monlib-encode.a |56.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/monlib/encode/buffered/libmonlib-encode-buffered.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/dynamic_counters/libcpp-monlib-dynamic_counters.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/prometheus/libmonlib-encode-prometheus.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/spack/libmonlib-encode-spack.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/liblibrary-cpp-json.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/libcpp-monlib-service.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/metrics/libcpp-monlib-metrics.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/resources/libservice-pages-resources.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/libmonlib-service-pages.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/tablesorter/libservice-pages-tablesorter.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/resources/libservice-pages-resources.global.a |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut_pg/ydb-core-tablet_flat-ut_pg |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/ydb-core-blobstorage-vdisk-hulldb-fresh-ut |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packers/liblibrary-cpp-packers.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packedtypes/liblibrary-cpp-packedtypes.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/random_provider/liblibrary-cpp-random_provider.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/liblibrary-cpp-retry.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pcre/libcpp-regex-pcre.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/libcpp-protobuf-json.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/resource/liblibrary-cpp-resource.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sighandler/liblibrary-cpp-sighandler.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/libcpp-protobuf-util.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sliding_window/liblibrary-cpp-sliding_window.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/skiff/liblibrary-cpp-skiff.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sse/liblibrary-cpp-sse.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/resource_pool_classifiers/libcore-sys_view-resource_pool_classifiers.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/bzip2/libcpp-streams-bzip2.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lzma/libcpp-streams-lzma.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/brotli/libcpp-streams-brotli.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base64/libcpp-string_utils-base64.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/proto/libproviders-solomon-proto.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/indent_text/libcpp-string_utils-indent_text.a |56.7%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/blobstorage/vdisk/query/ut/ydb-core-blobstorage-vdisk-query-ut |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/solomon_accessor/grpc/libsolomon-solomon_accessor-grpc.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/quote/libcpp-string_utils-quote.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/levenshtein_diff/libcpp-string_utils-levenshtein_diff.a |56.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/s3/actors/ut/yql_arrow_push_down_ut.cpp |56.8%| PREPARE $(FLAKE8_PY3-715603131) |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/parse_size/libcpp-string_utils-parse_size.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/relaxed_escaper/libcpp-string_utils-relaxed_escaper.a |56.7%| PREPARE $(OS_SDK_ROOT-sbr:243881345) |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/processor/libcore-sys_view-processor.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/scan/libcpp-string_utils-scan.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/ztstrbuf/libcpp-string_utils-ztstrbuf.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/url/libcpp-string_utils-url.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/solomon_accessor/client/libsolomon-solomon_accessor-client.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/sessions/libcore-sys_view-sessions.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tdigest/liblibrary-cpp-tdigest.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/service/libcore-sys_view-service.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/hook/libcpp-testing-hook.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_extensions/libcpp-testing-gtest_extensions.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest_main/libcpp-testing-unittest_main.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cron/libcpp-threading-cron.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/blocking_queue/libcpp-threading-blocking_queue.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/equeue/libcpp-threading-equeue.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/future/libcpp-threading-future.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/hot_swap/libcpp-threading-hot_swap.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/light_rw_lock/libcpp-threading-light_rw_lock.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/poor_man_openmp/libcpp-threading-poor_man_openmp.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/thread_local/libcpp-threading-thread_local.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/task_scheduler/libcpp-threading-task_scheduler.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/time_provider/liblibrary-cpp-time_provider.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.global.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/yson/libcpp-json-yson.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/iterator/liblibrary-cpp-iterator.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipv6_address/liblibrary-cpp-ipv6_address.a |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/common/kqp_tx.cpp |55.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/type_info/liblibrary-cpp-type_info.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/libcontrib-libs-tcmalloc.global.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/small/libcpp-getopt-small.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/provider/libproviders-result-provider.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/expr_nodes/libproviders-ydb-expr_nodes.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/public/libtx-sequenceshard-public.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/decimal/libessentials-public-decimal.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/public/ydb_issue/libyql-public-ydb_issue.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/actors/libcore-testlib-actors.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/tablets/libcore-sys_view-tablets.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/proto/libproviders-ydb-proto.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpublic-issue-protos.a |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp.cpp |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/protos/libcolumnshard-bg_tasks-protos.a |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/control.cpp |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/basics/libcore-testlib-basics.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/events/libcolumnshard-bg_tasks-events.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/abstract/libcolumnshard-bg_tasks-abstract.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actors/libyql-utils-actors.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tracing/libydb-core-tracing.a |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_lwtrace_probes.cpp |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/manager/libcolumnshard-bg_tasks-manager.a |56.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_event_impl.cpp |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/liblibrary-cpp-getopt.global.a |56.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/kqp_resolve.h_serialized.cpp |56.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_resolve.cpp |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libessentials-public-types.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/libessentials-public-issue.a |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_script_executions.cpp |56.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_ru_calc.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/kqp_tx_info.h_serialized.cpp |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/storage/libcore-sys_view-storage.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/plan/libyql-utils-plan.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/https/libyt-core-https.a |56.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_types.cpp |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/result_format/libessentials-public-result_format.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/http/libyt-core-http.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/operation/libclient-yc_private-operation.a |56.5%| PREPARE $(TEST_TOOL_HOST-sbr:8067063302) |56.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/api/client/yc_private/accessservice/libclient-yc_private-accessservice.a |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_user_request_context.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/common/kqp_tx_manager.cpp |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/common/libclient-yc_public-common.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/common/libcolumnshard-blobs_action-common.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/counters/libcolumnshard-blobs_action-counters.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/arrow/libpublic-udf-arrow.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/servicecontrol/libclient-yc_private-servicecontrol.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/libclient-yc_private-resourcemanager.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/escape/libcpp-html-escape.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/libtx-columnshard-common.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/events/libclient-yc_public-events.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/hdr/libcpp-histogram-hdr.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/provider/libproviders-ydb-provider.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/support/libpublic-udf-support.a |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/s3/common/ut/ydb-library-yql-providers-s3-common-ut |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libcolumnshard-common-protos.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/base/libpublic-lib-base.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/lib/libblobstorage-ut_blobstorage-lib.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/libclient-yc_private-iam.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/cache_policy/libcolumnshard-data_accessor-cache_policy.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/libessentials-public-udf.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.global.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/deprecated/client/liblib-deprecated-client.a |56.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/data_locks/manager/libcolumnshard-data_locks-manager.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/misc/isa_crc64/libisa-l_crc_yt_patch.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/libydb-core-testlib.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.global.a |56.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/ut/ydb-core-fq-libs-db_id_async_resolver_impl-ut |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/libyt-yt-core.global.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/exception_policy/libudf-service-exception_policy.global.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/experimental/libpublic-lib-experimental.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libapi-grpc-draft.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/json_value/libpublic-lib-json_value.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/resource/libpy3library-python-resource.global.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.global.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.global.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/reservoir_sampling/libpy3library-python-reservoir_sampling.global.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/filelock/libpy3library-python-filelock.global.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/func/libpy3library-python-func.global.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/fs/libpy3library-python-fs.global.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/cores/libpy3library-python-cores.global.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/certifi/libpy3library-python-certifi.global.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.global.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/tz/libpublic-udf-tz.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/scheme_types/libpublic-lib-scheme_types.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.global.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/libc/libpython-symbols-libc.global.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/registry/libpython-symbols-registry.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/python/libpython-symbols-python.global.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_common/libpy3python-testing-yatest_common.global.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_lib/libpy3python-testing-yatest_lib.global.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/value/libpublic-lib-value.a 
|57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/sdk_core_access/libydb_sdk_core_access.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/settings/libessentials-sql-settings.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/windows/libpy3library-python-windows.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/retry/libpy3library-python-retry.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/auth/libyt-library-auth.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/libyql-essentials-sql.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.a |57.0%| PREPARE $(FLAKE8_PY2-2255386470) |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/certifi/libpy3library-python-certifi.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/command_base/libydb_cli_command_base.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/ydb_discovery/libydb_cli_command_ydb_discovery.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/lexer/libsql-v0-lexer.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg_dummy/libessentials-sql-pg_dummy.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/fq/libpublic-lib-fq.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/libcolumnshard-data_sharing-protos.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/files/libydb_cli-dump-files.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/adapters/issue/libcpp-adapters-issue.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/libchanges-compaction-abstract.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/common/libchanges-compaction-common.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/common_client/impl/libclient-common_client-impl.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/include/ydb-cpp-sdk/client/topic/libydb-cpp-sdk-client-topic.a |57.2%| [LD] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/udfs/test/test_import/libtest_import_udf.so |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/libcommon.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/common/libcpp-json-common.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/easy_parse/libcpp-json-easy_parse.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/libchanges-compaction-sparsed.global.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/pcdata/libcpp-html-pcdata.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/common_client/libsrc-client-common_client.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/counters/libengines-changes-counters.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.global.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libydb-core-protos.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/column_converters/libyt-library-column_converters.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/driver/libsrc-client-driver.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libcolumnshard-engines-protos.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extension_common/libsrc-client-extension_common.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr3/libv1-lexer-antlr3.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/decimal/libyt-library-decimal.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.global.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/erasure/libyt-library-erasure.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extensions/solomon_stats/libclient-extensions-solomon_stats.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/libreader-common_reader-constructor.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr3_ansi/libv1-lexer-antlr3_ansi.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam/libsrc-client-iam.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4/libv1-lexer-antlr4.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.global.a 
|57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/coordination/libsrc-client-coordination.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam_private/libsrc-client-iam_private.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/common/libimpl-ydb_internal-common.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_endpoints/libclient-impl-ydb_endpoints.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/constructor/libreader-sys_view-constructor.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/libsimple_reader-iterator-collections.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/portions/libreader-sys_view-portions.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/chunks/libreader-sys_view-chunks.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/granules/libreader-sys_view-granules.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_ansi/libv1-lexer-antlr4_ansi.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/optimizer/libreader-sys_view-optimizer.global.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/fetch/libcpp-http-fetch.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/abstract/libreader-sys_view-abstract.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/portions/libcolumnshard-engines-portions.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/db_driver_state/libimpl-ydb_internal-db_driver_state.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/common/libengines-scheme-common.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/abstract/libengines-scheme-abstract.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libscheme-defaults-protos.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/libsql-v1-lexer.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/grpc_connections/libimpl-ydb_internal-grpc_connections.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/make_request/libimpl-ydb_internal-make_request.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/draft/libsrc-client-draft.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/tiering/libengines-scheme-tiering.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/retry/libimpl-ydb_internal-retry.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/libessentials-sql-v0.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a |57.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/query_tracker_client/libyt-client-query_tracker_client.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/numeric/libyt-library-numeric.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/io/libcpp-http-io.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/value_helpers/libimpl-ydb_internal-value_helpers.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/session_pool/libimpl-ydb_internal-session_pool.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/libstorage-actualizer-counters.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/index/libstorage-actualizer-index.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_stats/libclient-impl-ydb_stats.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/chunks/libengines-storage-chunks.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr3/libv1-proto_parser-antlr3.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/import/libsrc-client-import.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr3_ansi/libv1-proto_parser-antlr3_ansi.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/proto/libsrc-client-proto.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/libstorage-indexes-bloom.global.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.global.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr4/libv1-proto_parser-antlr4.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/libstorage-indexes-portions.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/libstorage-indexes-skip_index.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr4_ansi/libv1-proto_parser-antlr4_ansi.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a |57.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/farmhash/arch/sse42_aesni/libfarmhash-arch-sse42_aesni.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.global.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/libyt-library-profiling.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/operation/libsrc-client-operation.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/ss_tasks/libsrc-client-ss_tasks.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/libsql-v1-proto_parser.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.global.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/libsrc-client-topic.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/query/impl/libclient-query-impl.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/query/libsrc-client-query.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/liblcbuckets-planner-selector.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/flatbuffers/libcontrib-libs-flatbuffers.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/tiling/libstorage-optimizer-tiling.global.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/liblcbuckets-planner-level.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/login/libtypes-credentials-login.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/libcolumnshard-export-protos.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/common/libcolumnshard-export-common.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libpy3core-config-protos.global.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/local_gateway/libproviders-dq-local_gateway.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/quantile_digest/libyt-library-quantile_digest.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/libsrc-client-table.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/stats_collector/libproviders-dq-stats_collector.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/binary_json/libessentials-types-binary_json.a |57.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/sdk/cpp/src/client/types/credentials/libclient-types-credentials.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/events/libcolumnshard-export-events.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/xz/libcpp-streams-xz.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/impl/libclient-persqueue_public-impl.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/formats/libyt-library-formats.a |57.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tools/astdiff/astdiff |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/actors/libservices-lib-actors.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/oauth2_token_exchange/libtypes-credentials-oauth2_token_exchange.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/dynumber/libessentials-types-dynumber.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/operation/libclient-types-operation.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/selector/abstract/libsession-selector-abstract.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/selector/backup/libsession-selector-backup.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/impl/libclient-table-impl.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/abstract/libsession-storage-abstract.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/failure_injector/libessentials-utils-failure_injector.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/uuid/libessentials-types-uuid.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/tier/libsession-storage-tier.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/value/libsrc-client-value.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/decimal/libsrc-library-decimal.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/libllvm16-lib-ExecutionEngine.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/jwt/libsrc-library-jwt.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/issue/libsrc-library-issue.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/service/libproviders-dq-service.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/dummy/libpq-gateway-dummy.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/AsmPrinter/liblib-CodeGen-AsmPrinter.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/grpc/client/libsdk-library-grpc-client-v3.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/abstract/libcolumnshard-normalizer-abstract.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/string_utils/helpers/liblibrary-string_utils-helpers.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a |57.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/sdk/cpp/src/library/operation_id/libsrc-library-operation_id.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/protos/liblibrary-operation_id-protos.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tables/libcolumnshard-normalizer-tables.global.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/backtrace/libessentials-utils-backtrace.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/schema_version/libcolumnshard-normalizer-schema_version.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TextAPI/libllvm16-lib-TextAPI.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resources/libtx-columnshard-resources.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/granule/libcolumnshard-normalizer-granule.global.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/impl/libclient-topic-impl.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/libessentials-utils-fetch.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/abstract/libcolumnshard-splitter-abstract.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/persqueue/obfuscate/libsdk-library-persqueue-obfuscate-v3.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/uuid/libsrc-library-uuid.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/events/tables_erased/libsubscriber-events-tables_erased.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/subscriber/libsubscriber-abstract-subscriber.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ngtcp2/libcontrib-libs-ngtcp2.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/libessentials-sql-v1.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/events/tx_completed/libsubscriber-events-tx_completed.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/libyql-essentials-utils.a |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/auth/libydb-services-auth.a |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_kafka_functions.cpp |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_serialization.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/metarequest_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/kafka_test_client.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/actors_ut.cpp |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/abstract/libservices-bg_tasks-abstract.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/re2/libyt-library-re2.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tablet/libtx-columnshard-tablet.a 
|58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/skiff_ext/libyt-library-skiff_ext.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/protos/libservices-bg_tasks-protos.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/t1ha/libcontrib-libs-t1ha.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-sdkutils/librestricted-aws-aws-c-sdkutils.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/proto/libutils-log-proto.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/no_percpu_cache/liblibs-tcmalloc-no_percpu_cache.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/libessentials-utils-log.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/backup/libydb-services-backup.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tx_reader/libtx-columnshard-tx_reader.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/libcore-tx-data_events.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tracing/libyt-library-tracing.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/threading/libessentials-utils-threading.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/protos/libcolumnshard-transactions-protos.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/cms/libydb-services-cms.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/providers/stat/expr_nodes/libproviders-stat-expr_nodes.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor_composite/usage/libtx-conveyor_composite-usage.a |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_protocol.cpp |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/public/libtx-coordinator-public.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/protos/libtx-coordinator-protos.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tvm/libyt-library-tvm.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/common/libtx-data_events-common.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/general_cache/service/libtx-general_cache-service.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/config/libydb-services-config.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/general_cache/source/libtx-general_cache-source.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/service/libtx-conveyor-service.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/libydb-core-tx.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/numeric/libabseil-cpp-absl-numeric.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/protos/libapi-protos-persqueue-deprecated.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor_composite/service/libtx-conveyor_composite-service.a |58.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/apps/dstool/libpy3ydb-dstool.global.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.global.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/strings/libabseil-cpp-absl-strings.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/service/liblimiter-grouped_memory-service.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/common/libcpp-mapreduce-common.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/locks/libcore-tx-locks.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/usage/liblimiter-grouped_memory-usage.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/libcore-tx-long_tx_service.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/service/libservices-ext_index-service.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/l2_distance/liblibrary-cpp-l2_distance.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/service/libtx-priorities-service.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/dynamic_config/libydb-services-dynamic_config.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http_client/libcpp-mapreduce-http_client.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/libpy3libs-config-protos.global.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/fast_sax/libcpp-json-fast_sax.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/undumpable/libyt-library-undumpable.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http/libcpp-mapreduce-http.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/auth/libservices-lib-auth.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/usage/libtx-tracing-usage.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_rb_tree/libcpp-containers-intrusive_rb_tree.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/fcontext_impl/libboost-context-fcontext_impl.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/mediator/libcore-tx-mediator.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/proto/liblibs-control_plane_storage-proto.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/kesus/libydb-services-kesus.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/discovery/libydb-services-discovery.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/events/libolap-bg_tasks-events.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/common/libtx-schemeshard-common.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/general_cache/usage/libtx-general_cache-usage.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/column_families/libschemeshard-olap-column_families.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/abstract/libservices-metadata-abstract.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/libcpp-mapreduce-interface.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/keyvalue/libydb-services-keyvalue.a |58.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yt/cpp/mapreduce/client/libcpp-mapreduce-client.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/common/libservices-metadata-common.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/library/user_job_statistics/libmapreduce-library-user_job_statistics.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.a |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/logging/libmapreduce-interface-logging.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/libydb-services-metadata.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/fq/libydb-services-fq.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/maintenance/libydb-services-maintenance.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/thrift/libcontrib-restricted-thrift.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/sharding/libservices-lib-sharding.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ref/libinternal-proxies-ref.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/ds_table/libservices-metadata-ds_table.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/dbg_info/libcpp-lfalloc-dbg_info.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/chunk_queue/libcpp-threading-chunk_queue.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/manager/libservices-metadata-manager.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/ytprof/api/liblibrary-ytprof-api.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.global.a 
|58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/lower_case/libcpp-digest-lower_case.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/accessor/libmetadata-secret-accessor.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cpuid_check/liblibrary-cpp-cpuid_check.global.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/libcore-tx-sequenceshard.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lcs/liblibrary-cpp-lcs.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/io/libcpp-mapreduce-io.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_schema/libfq-libs-db_schema.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/libfq-libs-db_id_async_resolver_impl.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/formats/libyt_proto-yt-formats.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/core/libyt_proto-yt-core.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/evlog/libcore-util-evlog.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/json/libcore-viewer-json.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/auth/libcore-sys_view-auth.a |58.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/actors/core/liblibrary-actors-core.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/method/libcpp-openssl-method.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/actorsys_test/libcore-util-actorsys_test.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/llvm16/libyt-comp_nodes-llvm16.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/expr_nodes/libproviders-yt-expr_nodes.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/common/libproviders-yt-common.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/yaml/libcore-viewer-yaml.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/libproviders-yt-codec.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/events/libcore-wrappers-events.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/replication/libydb-services-replication.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/proto/libcore-ymq-proto.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/std/libymq-queues-std.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/libydb-core-util.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/parser/libminikql-jsonpath-parser.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/view/libydb-services-view.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/rate_limiter/libydb-services-rate_limiter.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/ut_helpers/libtx-schemeshard-ut_helpers.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zstd/libcpp-streams-zstd.a |58.6%| [AR] 
{BAZEL_DOWNLOAD} $(B)/library/cpp/scheme/liblibrary-cpp-scheme.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/http/libcore-ymq-http.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/libydb-library-aclib.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/liblibrary-aclib-protos.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/csv/libcpp-string_utils-csv.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/protos/libcore-viewer-protos.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/actor_type/liblibrary-actors-actor_type.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/serialize/libessentials-ast-serialize.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/util/libydb_cli-dump-util.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/harmonizer/libactors-core-harmonizer.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/registry/libcore-arrow_kernels-registry.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/libessentials-core-cbo.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnscachelib/liblibrary-actors-dnscachelib.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/dq/llvm16/libcomp_nodes-dq-llvm16.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/helpers/liblibrary-actors-helpers.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/common/libcpp-testing-common.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/lib/libyt-gateway-lib.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnsresolver/liblibrary-actors-dnsresolver.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/libyql-essentials-ast.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/atomic/libcpp-threading-atomic.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/log_backend/liblibrary-actors-log_backend.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/request/libcore-arrow_kernels-request.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/mock/libactors-interconnect-mock.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/qplayer/libyt-gateway-qplayer.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/job/libproviders-yt-job.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_proxy/libcore-tx-tx_proxy.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/memory_log/liblibrary-actors-memory_log.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/libessentials-core-dq_integration.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/credentials/libessentials-core-credentials.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/liblibrary-actors-protos.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/util/liblibrary-actors-util.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/http/liblibrary-actors-http.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/liblibrary-actors-testlib.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/libllvm16-lib-CodeGen.a |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/callstack.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/wilson/liblibrary-actors-wilson.a |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/library/actors/core/actorsystem.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/cpu_manager.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/activity_guard.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/client/libyt_proto-yt-client.a |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/actor_virtual.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/ask.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.a |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/gc_ut.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/uri/liblibrary-cpp-uri.a |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/actor_coroutine.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/actor.h_serialized.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/actorid.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/actor_bootstrapped.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/actor.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/thread_context.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/event.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/events.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/liblibrary-actors-interconnect.a |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/io_dispatcher.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/defs.h_serialized.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/buffer.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/av_bootstrapped.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/event_pb.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/executor_thread_ctx.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/events_undelivered.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/event_load.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/interconnect.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/execution_stats.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/executor_pool_basic.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/executor_pool_base.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/native/libyt-gateway-native.a |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/executor_pool_io.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/scheduler_cookie.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/executor_pool_shared.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/executor_thread.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/monotonic.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/mailbox.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/log_settings.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/log_buffer.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/mon_stats.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/mon.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/log.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/log_iface.h_serialized.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/mailbox_lockfree.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/monotonic_provider.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/actors/core/process_stats.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/scheduler_actor.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/scheduler_basic.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/probes.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/ut/actor_basic_ut.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyrsistent/py3/libpy3python-pyrsistent-py3.global.a |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/ut/process_stats_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/table_writer_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/ut/actorsystem_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/ut/performance_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/ut/ask_ut.cpp |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protobuf/libpy3protobuf-builtin_proto-protos_from_protobuf.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/actors/libproviders-clickhouse-actors.a |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/ut/scheduler_actor_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/ut/event_pb_payload_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/ut/executor_pool_basic_ut.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyHamcrest/py3/libpy3python-PyHamcrest-py3.a |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/ut/actor_coroutine_ut.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/ut/event_pb_ut.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_index_build.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/ut/mailbox_lockfree_ut.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/ut/actor_shared_threads.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/ut/log_ut.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/ut/actor_ut.cpp |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1-modules/py3/libpy3python-pyasn1-modules-py3.global.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.global.a |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/s3/provider/yql_s3_listing_strategy_ut.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyJWT/py3/libpy3python-PyJWT-py3.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.global.a |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_query_ut.cpp |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/six/py3/libpy3python-six-py3.a |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/ut/benchmark_ut.cpp |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/charset-normalizer/libpy3contrib-python-charset-normalizer.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/google-auth/py3/libpy3python-google-auth-py3.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Jinja2/py3/libpy3python-Jinja2-py3.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/google-auth/py3/libpy3python-google-auth-py3.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/idna/py3/libpy3python-idna-py3.a |59.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/iniconfig/libpy3contrib-python-iniconfig.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.context/libpy3contrib-python-jaraco.context.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.context/libpy3contrib-python-jaraco.context.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/future/py3/libpy3python-future-py3.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.text/libpy3contrib-python-jaraco.text.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/kubernetes/libpy3contrib-python-kubernetes.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jsonschema/py3/libpy3python-jsonschema-py3.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/more-itertools/py3/libpy3python-more-itertools-py3.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/libyt-yt-core.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/platformdirs/libpy3contrib-python-platformdirs.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/libcore-external_sources-hive_metastore.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pluggy/py3/libpy3python-pluggy-py3.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/packaging/py3/libpy3python-packaging-py3.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/oauthlib/libpy3contrib-python-oauthlib.global.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests-oauthlib/libpy3contrib-python-requests-oauthlib.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytest/py3/libpy3python-pytest-py3.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1/py3/libpy3python-pyasn1-py3.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests/py3/libpy3python-requests-py3.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.a |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_client_ut.cpp |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pycparser/py3/libpy3python-pycparser-py3.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/python-dateutil/py3/libpy3python-python-dateutil-py3.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/setuptools/py3/libpy3python-setuptools-py3.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/rsa/py3/libpy3python-rsa-py3.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests/py3/libpy3python-requests-py3.global.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/six/py3/libpy3python-six-py3.global.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typing-extensions/py3/libpy3python-typing-extensions-py3.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typeguard/libpy3contrib-python-typeguard.global.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/common_opt/libessentials-core-common_opt.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml/py3/libpy3python-ruamel.yaml-py3.global.a |59.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/typing-extensions/py3/libpy3python-typing-extensions-py3.global.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/websocket-client/libpy3contrib-python-websocket-client.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wheel/libpy3contrib-python-wheel.global.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/urllib3/py3/libpy3python-urllib3-py3.global.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/importlib-resources/libpy3contrib-python-importlib-resources.global.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ydb/py3/libpy3python-ydb-py3.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.a |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/ut/memory_stats_ut.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/future/py3/libpy3python-future-py3.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/setuptools/py3/libpy3python-setuptools-py3.global.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ydb/py3/libpy3python-ydb-py3.global.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/charset-normalizer/libpy3contrib-python-charset-normalizer.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/breakpad/src/client/linux/libsrc-client-linux.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiosignal/libpy3contrib-python-aiosignal.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyHamcrest/py3/libpy3python-PyHamcrest-py3.global.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/kubernetes/libpy3contrib-python-kubernetes.global.a |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/ut/path_ut.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ydb/libydb-services-ydb.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cachetools/py3/libpy3python-cachetools-py3.global.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.global.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/attrs/py3/libpy3python-attrs-py3.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/attrs/py3/libpy3python-attrs-py3.global.a |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/logoblob_ut.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/blobstorage_grouptype_ut.cpp |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyJWT/py3/libpy3python-PyJWT-py3.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/workload/benchmark_base/state_ut.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.global.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Jinja2/py3/libpy3python-Jinja2-py3.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.global.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/breakpad/src/liblibs-breakpad-src.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/libffi/libcontrib-restricted-libffi.a |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/db_id_async_resolver_impl/ut/mdb_endpoint_generator_ut.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/llhttp/libcontrib-restricted-llhttp.a |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/net_classifier_updater_ut.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.global.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/iniconfig/libpy3contrib-python-iniconfig.a |59.5%| 
[AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/croaring/libcontrib-libs-croaring.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/idna/py3/libpy3python-idna-py3.global.a |59.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/cluster_ordering/ut/cluster_ordering-ut |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/libcore-ymq-actor.a |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_actions.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jsonschema/py3/libpy3python-jsonschema-py3.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.text/libpy3contrib-python-jaraco.text.global.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/modifications_validator_ut.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.a |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_context.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_sectormap.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pycparser/py3/libpy3python-pycparser-py3.a |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_run.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/opt/ut/dq_opt_interesting_orderings_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_pragma_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_races.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_log_cache_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/opt/ut/dq_cbo_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_yql_ut.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml/py3/libpy3python-ruamel.yaml-py3.a |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_blockdevice_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/shard/ut/shard_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/mock/pdisk_mock.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_ut_configs.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_col_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protoc/libpy3protobuf-builtin_proto-protos_from_protoc.global.a |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/anubis_osiris/blobstorage_anubis_algo_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_appendix_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/snap_vec_ut.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Lib/libpy3tools-python3-Lib.global.a |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_segment_ut.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/fresh/fresh_data_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/localdb_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/statestorage_guardian_impl_ut.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/table/libtest-libs-table.a |59.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/schemeshard/libpy3core-protos-schemeshard.global.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut_large/ut_btree_index_large.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_performance_test/main.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_ut_large.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/query/query_spacetracker_ut.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/avx2/libhighwayhash-arch-avx2.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/sse41/libhighwayhash-arch-sse41.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/libcontrib-libs-highwayhash.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut_pg/flat_database_pg_ut.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libpy3core-scheme-protos.global.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libpy3ydb-core-protos.global.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_mirror3of4/main.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |59.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/s3/common/util_ut.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/s3_recipe_ut_helpers.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/huge.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp |59.7%| [AS] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/c79bf977cdb0ffe390211f5e3d.yasm |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_s3_plan_ut.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_color_limits.cpp |59.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/3240ce8937f3b09a8e2e5f16a2.yasm |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_util_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/storage_service_ydb_ut.cpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.functools/py3/libpy3python-jaraco.functools-py3.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/storagepoolmon/ut/storagepoolmon_ut.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/ymq_ut.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/common.cpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/actors/libproviders-ydb-actors.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/comp_nodes/libproviders-ydb-comp_nodes.a |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memory_controller_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/kinesis_ut.cpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests-oauthlib/libpy3contrib-python-requests-oauthlib.global.a |59.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/c394ce038261e16ed3481e308d.auxcpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/result_formatter/result_formatter_ut.cpp |59.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/tenacity/py3/libpy3python-tenacity-py3.global.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wheel/libpy3contrib-python-wheel.a |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_state_storage_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_collector_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request_ut.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/configs_dispatcher_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memtable_collection_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut.cpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/actors/libproviders-yt-actors.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/bindings/libyql-utils-bindings.a |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/dqrun/dqrun.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_ut_tenants.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/dq_task_preprocessor/libproviders-yt-dq_task_preprocessor.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_system/libyql-utils-actor_system.a |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/log_settings_configurator_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/immediate_controls_configurator_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/configs_cache_ut.cpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pluggy/py3/libpy3python-pluggy-py3.a |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/jaeger_tracing_configurator_ut.cpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.global.a |59.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/static_validator/ut/ydb-library-yaml_config-static_validator-ut |59.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/metrics/ut/ydb-core-fq-libs-metrics-ut |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.collections/libpy3contrib-python-jaraco.collections.global.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.collections/libpy3contrib-python-jaraco.collections.a |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/pq/provider/ut/yql_pq_ut.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/packaging/py3/libpy3python-packaging-py3.global.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.functools/py3/libpy3python-jaraco.functools-py3.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/hive_metastore_native/libexternal_sources-hive_metastore-hive_metastore_native.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tbb/libcontrib-libs-tbb.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libpy3columnshard-common-protos.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/oauthlib/libpy3contrib-python-oauthlib.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/platformdirs/libpy3contrib-python-platformdirs.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytest/py3/libpy3python-pytest-py3.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1/py3/libpy3python-pyasn1-py3.a |59.9%| PREPARE $(CLANG-2518231432) |59.9%| PREPARE $(CLANG18-3363451693) |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/python-dateutil/py3/libpy3python-python-dateutil-py3.a |59.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/sandbox.cpp |59.9%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/library/cpp/svnversion/svnversion.cpp |59.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/library/cpp/svnversion/svn_interface.c |59.9%| [BI] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/buildinfo_data.h |59.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/build_info.cpp |59.9%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/library/cpp/build_info/build_info_static.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/jemalloc/libcontrib-libs-jemalloc.a |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/ut/table_index_ut.cpp |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyrsistent/py3/libpy3python-pyrsistent-py3.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/rsa/py3/libpy3python-rsa-py3.global.a |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ut/group_test_ut.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typeguard/libpy3contrib-python-typeguard.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/websocket-client/libpy3contrib-python-websocket-client.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/operation_helpers_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_calls_ut.cpp |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tenacity/py3/libpy3python-tenacity-py3.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/urllib3/py3/libpy3python-urllib3-py3.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.global.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/more-itertools/py3/libpy3python-more-itertools-py3.global.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.a |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/statestorage_ut.cpp |59.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/solomon/ydb-library-yql-tests-sql-solomon |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiosignal/libpy3contrib-python-aiosignal.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cachetools/py3/libpy3python-cachetools-py3.a |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_basic_statistics.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_http_request.cpp |59.9%| PREPARE $(WITH_JDK-sbr:7832760150) |60.0%| PREPARE $(JDK17-472926544) |60.0%| PREPARE $(WITH_JDK17-sbr:7832760150) |60.0%| PREPARE $(JDK_DEFAULT-472926544) |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_column_statistics.cpp |60.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part4/ydb-tests-fq-yt-kqp_yt_file-part4 |60.0%| [LD] {BAZEL_DOWNLOAD} $(B)/tools/cpp_style_checker/cpp_style_checker |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/ut_common/libcore-statistics-ut_common.a |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_crypto_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_restore_ut.cpp |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/network/libessentials-utils-network.a |60.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/tools/python3/lib2/py/libpy3python3-lib2-py.global.a |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_node_enumeration_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_ut_pool.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_ut_local.cpp |60.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part5/ydb-tests-fq-yt-kqp_yt_file-part5 |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_helpers.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/opt/ut/dq_opt_hypergraph_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/node_broker_ut.cpp |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libpy3columnshard-engines-protos.global.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/patched/replxx/librestricted-patched-replxx.a |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_yard.cpp |60.1%| PREPARE $(CLANG14-1922233694) |60.1%| PREPARE $(CLANG-1922233694) |60.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_indexes/ydb-tests-functional-kqp-kqp_indexes |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/rows/libtest-libs-rows.a |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/inside_ydb_ut/inside_ydb_ut.cpp |60.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/s3_recipe/s3_recipe |60.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/ydb-dstool |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_scheme_ut.cpp |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1-modules/py3/libpy3python-pyasn1-modules-py3.a |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/pg/pg_catalog_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/feature_flags_configurator_ut.cpp |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/libcontrib-tools-python3.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/accurate_accumulate/liblibrary-cpp-accurate_accumulate.a |60.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/ut/ydb-core-config-ut |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.a |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/ut/mon_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_fetcher_ut.cpp |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/storage.pb.{h, cc} |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_utils_ut.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.{pb.h ... grpc.pb.h} |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/spilling/kqp_scan_spilling_ut.cpp |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libpy3scheme-defaults-protos.global.a |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/quota_requester.cpp |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.{pb.h ... grpc.pb.h} |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/main.cpp |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/validation.pb.{h, cc} |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.{pb.h ... 
grpc.pb.h} |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/server.cpp |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.{pb.h ... grpc.pb.h} |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_transport.pb.{h, cc} |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_tasks.pb.{h, cc} |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/health_config.pb.{h, cc} |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/minikql.{pb.h ... grpc.pb.h} |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_issue_message.pb.{h, cc} |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query.pb.{h, cc} |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/common.pb.{h, cc} |60.0%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/bin/moto_server |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/issue_severity.pb.{h, cc} |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/libformat_handler-ut-common.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_by_signature/libstreams-factory-open_by_signature.a |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_value.pb.{h, cc} |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/libcpp-streams-lz.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/snappy/libstreams-lz-snappy.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/lz4/libstreams-lz-lz4.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_common/libstreams-factory-open_common.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/linear_regression/liblibrary-cpp-linear_regression.a |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/leader_election_ut.cpp |60.2%| PREPARE $(GDB) |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/topic_session_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/coordinator_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/row_dispatcher_ut.cpp |60.0%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/gateways_config.pb.{h, cc} |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/marker.pb.{h, cc} |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/actors/cppcoro/corobenchmark/corobenchmark |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_ext/libessentials-core-pg_ext.a |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/memory/libqplayer-storage-memory.a |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/actors/wilson/ut/ydb-library-actors-wilson-ut |60.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/ut/ydb-public-tools-lib-cmds-ut |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/benchmark/main/libtesting-benchmark-main.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/file/libqplayer-storage-file.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/libessentials-core-url_lister.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/benchmark/libcpp-testing-benchmark.a |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/cppcoro/corobenchmark/main.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/libessentials-core-url_preprocessing.a |60.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |60.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/actors/cppcoro/liblibrary-actors-cppcoro.a |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/wilson/wilson_trace_ut.cpp |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpch-dbgen/libbenchmarks-gen-tpch-dbgen.a |60.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator_grpc/solomon_recipe_grpc |60.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part14/ydb-tests-fq-yt-kqp_yt_file-part14 |60.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |59.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/workload/tpch/ut/ydb-library-workload-tpch-ut |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpch/libbenchmarks-queries-tpch.global.a |59.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.global.a |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/actors/interconnect/ut_huge_cluster/ydb-library-actors-interconnect-ut_huge_cluster |59.9%| PREPARE $(CLANG16-1380963495) |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.a |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/cppcoro/task_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/cppcoro/task_actor_ut.cpp |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/actors/cppcoro/ut/ydb-library-actors-cppcoro-ut |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/test_helper/libtx-columnshard-test_helper.a |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/actors/http/ut/ydb-library-actors-http-ut |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/ut/ydb-library-yaml_config-ut |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dqrun/dqrun |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/metering/ut/ydb-core-metering-ut |59.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/actors/interconnect/ut/protos/libinterconnect-ut-protos.a |59.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/ut/ydb-tests-postgres_integrations-library-ut |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/dq/scheduler/ut/ydb-library-yql-providers-dq-scheduler-ut |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.global.a |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/base/ut/ydb-core-blobstorage-base-ut |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/http/http_ut.cpp |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/access/libclient-yc_private-access.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/ut_common/libtx-datashard-ut_common.a |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/dq/runtime/ut/file_cache_ut.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/actors/compute/ut/dq_source_watermark_tracker_ut.cpp |59.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blob_depot/ut/ydb-core-blob_depot-ut |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydb/ydb |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/debug/libsrc-client-debug.a 
|59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/dq/runtime/ut/ydb-library-yql-providers-dq-runtime-ut |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yaml_config/tools/dump_ds_init/main.cpp |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/deprecated/liblibrary-yaml_config-deprecated.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/config/libsrc-client-config.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/cms/libsrc-client-cms.a |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/dq/scheduler/ut/dq_scheduler_ut.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/actors/compute/ut/dq_compute_issues_buffer_ut.cpp |59.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/actors/compute/ut/dq_compute_actor_ut.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/actors/compute/ut/dq_compute_actor_async_input_helper_ut.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/mkql_dq/libproviders-yt-mkql_dq.a |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_group/main.cpp |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/formats/arrow/accessor/sparsed/ut/ydb-core-formats-arrow-accessor-sparsed-ut |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/protos/libhistogram-adaptive-protos.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/no_llvm/libminikql-computation-no_llvm.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/libcpp-histogram-adaptive.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_url_lister/libyt-lib-yt_url_lister.a |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/actors/compute/ut/ydb-library-yql-dq-actors-compute-ut |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/accessor/sub_columns/ut/ut_sub_columns.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/dq/actors/grouped_issues_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/dq/provider/yql_dq_provider_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/dq/actors/actors_ut.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/monitoring/libsrc-client-monitoring.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/libclient-nc_private-iam.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/libpy3library-mkql_proto-protos.global.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/security/simple/libmvp-security-simple.a |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/config/validation/auth_config_validator_ut/auth_config_validator_ut.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/ydb/v1/libyc_private-ydb-v1.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/core/protos/libmvp-core-protos.a |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mvp_tokens.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/helpers_ut.cpp |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/core/ut/ydb-mvp-core-ut |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/queue_attributes_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mvp_test_runtime.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/core/mvp_ut.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/core/libydb-mvp-core.a |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/counters_ut.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libpy3ydb-library-services.global.a |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/io_formats/arrow/scheme/ut/ydb-core-io_formats-arrow-scheme-ut |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/secure_protobuf_printer_ut.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/action_ut.cpp |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/libpy3library-login-protos.global.a |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/dlq_helpers_ut.cpp |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/libpy3library-actors-protos.global.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.global.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pire/libcpp-regex-pire.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/hyperloglog/liblibrary-cpp-hyperloglog.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/quota/libclient-yc_private-quota.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/jemalloc/libcpp-malloc-jemalloc.a |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/io_formats/arrow/scheme/csv_arrow_ut.cpp |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/oauth/libclient-yc_private-oauth.a |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_compaction.cpp |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/sfh/libcpp-digest-sfh.a |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yaml_config/yaml_config_proto2yaml_ut.cpp >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/config/init/init_ut.cpp |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/dq/actors/ut/ydb-library-yql-providers-dq-actors-ut |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/libydb_cli-commands-interactive.a |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yaml_config/console_dumper_ut.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yaml_config/yaml_config_ut.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yaml_config/yaml_config_parser_ut.cpp |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/table/libarrow-csv-table.a |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/dq/provider/ut/ydb-library-yql-providers-dq-provider-ut |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/base/bufferwithgaps_ut.cpp |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.a |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/openid_connect.cpp |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/libpy3library-ydb_issue-proto.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/file/libyt-gateway-file.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libpy3dq-actors-protos.global.a |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/oidc_proxy_ut.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/sentinel_ut.cpp |59.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/dq/proto/libpy3yql-dq-proto.global.a |59.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/py2_flake8 >> test.py::py2_flake8 [GOOD] |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/top_keeper/libcpp-containers-top_keeper.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.a >> test.py::py2_flake8 [GOOD] |59.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/py2_flake8 >> test.py::py2_flake8 [GOOD] |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_main/libcpp-testing-gtest_main.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpcds/libbenchmarks-queries-tpcds.global.a |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/main.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/base/ptr_ut.cpp |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/backup/libkikimr_backup.a |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/interconnect/ut_huge_cluster/huge_cluster.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/60e0318359c6f321a643ba5530.auxcpp |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/commands/libcommands.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest/libcpp-testing-gtest.a |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/formats/arrow/accessor/sub_columns/ut/ydb-core-formats-arrow-accessor-sub_columns-ut |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/init/ut/ydb-core-config-init-ut >> __main__.py::flake8 [GOOD] |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/oidc_proxy/libydb-mvp-oidc_proxy.a |59.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/85ebe612562ae66926ef97b463.yasm |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blob_depot/given_id_range_ut.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/metering/time_grid_ut.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/base/batched_vec_ut.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blob_depot/closed_interval_set_ut.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/accessor/sparsed/ut/ut_sparsed.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/metering/stream_ru_calculator_ut.cpp |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/local_executor/libcpp-threading-local_executor.a |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/workload/tpch/ut/queries_ut.cpp |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/defrag/ut/ydb-core-blobstorage-vdisk-defrag-ut |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.a >> test.py::flake8 [GOOD] |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql_simple_file/libproviders-common-mkql_simple_file.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/scheduler/libproviders-dq-scheduler.a |59.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/ydb_recipe/flake8 >> __main__.py::flake8 [GOOD] |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.global.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/libpy3library-folder_service-proto.global.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/topic/libtopic.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/transfer_workload/libtransfer_workload.a |59.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part18/py2_flake8 >> test.py::py2_flake8 [GOOD] |59.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/formats/arrow/protos/libpy3library-formats-arrow-protos.global.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/import/liblib-ydb_cli-import.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/log_backend/json_envelope_ut.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/timezone_conversion/liblibrary-cpp-timezone_conversion.a |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/normalization/libcpp-unicode-normalization.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/punycode/libcpp-unicode-punycode.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tld/liblibrary-cpp-tld.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/liblib-ydb_cli-dump.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/set/libcpp-unicode-set.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libpy3api-service-protos.global.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot.cpp >> test_generator.py::flake8 [GOOD] >> test_init.py::flake8 [GOOD] >> run_tests.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |59.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part18/flake8 >> test.py::flake8 [GOOD] |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/stat_visualization/libpublic-lib-stat_visualization.a |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/defrag/defrag_actor_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/accessor/composite/ut/ut_composite.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/tests/liblibrary-persqueue-tests.a |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/log_backend/ut/ydb-core-log_backend-ut |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_strategy/strategy_ut.cpp |59.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/flake8 >> test_init.py::flake8 [GOOD] |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_move_reboots/ut_move_reboots.cpp >> runner.py::flake8 [GOOD] |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/topic_workload/libtopic_workload.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/s3_recipe_helper/liblibrary-testlib-s3_recipe_helper.a |59.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/run_tests/flake8 >> run_tests.py::flake8 [GOOD] |59.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part0/flake8 >> test.py::flake8 [GOOD] |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_column_stats.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl_utility.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/libpy3providers-s3-proto.global.a |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/ut_sequence/datashard_ut_sequence.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/backpressure/ut_client/backpressure_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_async_index.cpp |59.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/runner/flake8 >> runner.py::flake8 [GOOD] >> test_encryption.py::flake8 [GOOD] >> test_actorsystem.py::flake8 [GOOD] |59.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/columnshard/ut_rw/ut_columnshard_read_write.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_vector_index.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/minikql_compile/yql_expr_minikql_compile_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_server_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/groupinfo/blobstorage_groupinfo_iter_ut.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/validation/auth_config_validator_ut/core-config-validation-auth_config_validator_ut |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/backpressure/queue_backpressure_client_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/groupinfo/blobstorage_groupinfo_blobmap_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl.cpp >> test_multinode_cluster.py::flake8 [GOOD] >> test_recompiles_requests.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_stats_mode.py::flake8 [GOOD] |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_unique_index.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/viewer_ut.cpp |59.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/encryption/flake8 >> test_encryption.py::flake8 [GOOD] |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_backup.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/ut_external_data_source_reboots.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_find_split_key.cpp |59.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/flake8 >> test_actorsystem.py::flake8 [GOOD] |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_move_table.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_split_merge.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/table_creator/table_creator_ut.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceshard/public/ut/ydb-core-tx-sequenceshard-public-ut |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_normalizer.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/main.cpp |59.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/flake8 >> test_recompiles_requests.py::flake8 [GOOD] |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/export.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/topic_data_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp |59.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/plans/flake8 >> test_stats_mode.py::flake8 [GOOD] |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/libclicommands.a |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/helper.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ut_bsvolume_reboots.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_script.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_followers.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/groupinfo/ut/ydb-core-blobstorage-groupinfo-ut |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication/ut_replication.cpp |59.6%| 
[CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/ut/ut_utils.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence/ut_sequence.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/service_mocks/ldap_mock/libtestlib-service_mocks-ldap_mock.a |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_locks.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_program.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/groupinfo/blobstorage_groupinfo_partlayout_ut.cpp |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/activation.pb.{h, cc} |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceproxy/sequenceproxy_ut.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/groupinfo/blobstorage_groupinfo_ut.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/key_range.{pb.h ... grpc.pb.h} |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_events.pb.{h, cc} |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/formats/arrow/accessor/composite/ut/ydb-core-formats-arrow-accessor-composite-ut |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.{pb.h ... grpc.pb.h} |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/common/v1/common.{pb.h ... grpc.pb.h} |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.{pb.h ... grpc.pb.h} |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/oidc_proxy/ut/ydb-mvp-oidc_proxy-ut >> test.py::flake8 [GOOD] |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_strategy/ydb-core-blobstorage-dsproxy-ut_strategy |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_group/ydb-core-blobstorage-ut_group |58.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/solomon/flake8 >> test.py::flake8 [GOOD] |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/invoke_builtins/no_llvm/libminikql-invoke_builtins-no_llvm.a >> test.py::flake8 [GOOD] >> allure_utils.py::flake8 [GOOD] >> results_processor.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> ydb_cli.py::flake8 [GOOD] >> ydb_cluster.py::flake8 [GOOD] |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/backpressure/ut/ydb-core-blobstorage-backpressure-ut |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libpy3api-grpc-draft.global.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libpy3api-protos-annotations.global.a |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/runtime/ut/ydb-library-yql-dq-runtime-ut |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_testshard/main.cpp >> test.py::flake8 [GOOD] |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/persqueue/topic_parser/ut/ydb-library-persqueue-topic_parser-ut |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/ut_helpers/liblibs-quota_manager-ut_helpers.a |58.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/lib/flake8 >> ydb_cluster.py::flake8 [GOOD] |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/python/enable_v3_new_behavior/libpy3sdk-python-enable_v3_new_behavior.global.a |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part12/flake8 >> 
test.py::flake8 [GOOD] |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/testing/group_overseer/libblobstorage-testing-group_overseer.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libpy3api-grpc.global.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/libpersqueue_public-ut-ut_utils.a >> test.py::flake8 [GOOD] |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/ydb-core-blobstorage-vdisk-hulldb-generic-ut |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libpy3api-protos.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libpy3yql-essentials-protos.global.a |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/ut_sequence/dsproxy_config_retrieval.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/runlib/libtools-kqprun-runlib.a |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/test_connection/ut/test_connection_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/tools/kqprun/kqprun.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libpy3core-issue-protos.global.a >> __init__.py::py2_flake8 [GOOD] >> base.py::py2_flake8 [GOOD] >> datashard.py::py2_flake8 [GOOD] >> disk.py::py2_flake8 [GOOD] >> factories.py::py2_flake8 [GOOD] >> hive.py::py2_flake8 [GOOD] >> logs.py::py2_flake8 [GOOD] >> schemeshard.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> tpc_tests.py::flake8 [GOOD] |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/format_handler_ut.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libpy3core-file_storage-proto.global.a |58.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part2/flake8 >> test.py::flake8 [GOOD] |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_parser_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql_ut.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpy3public-issue-protos.global.a |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/wardens/py2_flake8 >> schemeshard.py::py2_flake8 [GOOD] |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/proto/libkqprun-src-proto.a |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part4/py2_flake8 >> test.py::py2_flake8 [GOOD] |58.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part15/flake8 >> test.py::flake8 [GOOD] |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/public_http/http_router_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema_ut.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libpy3essentials-public-types.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/libtools-kqprun-src.a |58.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/flake8 >> tpc_tests.py::flake8 [GOOD] |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libpy3providers-common-proto.global.a |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/ut/graph_ut.cpp |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/backpressure/ut_client/ydb-core-blobstorage-backpressure-ut_client |58.9%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/view/view_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ru_calculator/ut_ru_calculator.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/libpy3ydb-tests-library.a |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/no_llvm/libminikql-comp_nodes-no_llvm.a |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/libpy3ydb-tools-cfg.global.a |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut >> __main__.py::flake8 [GOOD] >> test_disk.py::flake8 [GOOD] >> test_tablet.py::flake8 [GOOD] |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/canonical/libpy3tests-oss-canonical.global.a |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/gateway/ut/metadata_conversion.cpp |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/ut/control_plane_proxy_ut.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/clients/libpy3tests-library-clients.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/wardens/libpy3tests-library-wardens.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/libpy3ydb-tests-library.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/k8s_api/libpy3tools-cfg-k8s_api.global.a |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/batch_slice.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/ut_splitter.cpp |58.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/compatibility/downloader/flake8 >> __main__.py::flake8 [GOOD] |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/ydb_sdk_import/libpy3tests-oss-ydb_sdk_import.global.a |58.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/flake8 >> test_tablet.py::flake8 [GOOD] |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fq_runner/libpy3tests-tools-fq_runner.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/ydbd_slice/libpy3ydbd_slice.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/walle/libpy3tools-cfg-walle.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/ydbd_slice/libpy3ydbd_slice.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/lib/libcommon-compress_base-lib.a |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut >> conftest.py::flake8 [GOOD] >> test_alter_compression.py::flake8 [GOOD] >> test_alter_tiering.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_scheme_load.py::flake8 [GOOD] >> test_simple.py::flake8 [GOOD] >> test.py::test_kikimr_config_generator_generic_connector_config [GOOD] |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/persqueue/topic_parser/ut/topic_names_converter_ut.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/libcompress_udf.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/udfs/common/yson2/libyson2_udf.global.a |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/in_memory_control_plane_storage_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_permissions_ut.cpp >> test.py::py2_flake8 [GOOD] |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_permissions_ut.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.global.a |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_permissions_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/dynamic_config/dynamic_config_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_ut.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.global.a |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/scenario/flake8 >> test_simple.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> gen-report.py::flake8 [GOOD] |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/http_gateway/mock/libcommon-http_gateway-mock.a |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part1/py2_flake8 >> test.py::py2_flake8 [GOOD] |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/service/query_history_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/http/ut/xml_builder_ut.cpp >> test.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/describe_topic_ut.cpp |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/lib/cmds/ut/py3test >> test.py::test_kikimr_config_generator_generic_connector_config [GOOD] |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/result_convert/flake8 >> gen-report.py::flake8 [GOOD] |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tools/cfg/bin/flake8 >> __main__.py::flake8 [GOOD] |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_quotas_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/stress_tool/device_test_tool_ut.cpp |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part17/flake8 >> test.py::flake8 [GOOD] |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service_ut.cpp >> manager.cpp::clang_format [GOOD] >> manager.h::clang_format [GOOD] >> test.py::py2_flake8 [GOOD] |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part19/py2_flake8 >> test.py::py2_flake8 [GOOD] |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/ic_cache_ut.cpp |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/utils/actor_system/clang_format >> manager.h::clang_format [GOOD] |58.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp >> integrations_test.py::test_read_jtest_results[o/OK] [GOOD] >> integrations_test.py::test_read_jtest_results[f/failed1] [GOOD] >> integrations_test.py::test_read_jtest_results[f/failed2] [GOOD] >> integrations_test.py::test_read_jtest_results[f/error1] [GOOD] >> integrations_test.py::test_read_jtest_results[s/skipped1] [GOOD] >> test.py::py2_flake8 [GOOD] |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/generic/actors/ut/yql_generic_lookup_actor_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/sdk_sessions_ut/sdk_sessions_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/object_storage/inference/ut/arrow_inference_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_stats.cpp >> integrations_test.py::test_read_jtest_results[s/skipped2] [GOOD] >> integrations_test.py::test_read_jtest_with_one_result [GOOD] >> test_account_actions.py::flake8 [GOOD] >> test_acl.py::flake8 [GOOD] >> test_counters.py::flake8 [GOOD] >> test_format_without_version.py::flake8 [GOOD] >> test_garbage_collection.py::flake8 [GOOD] >> test_multiplexing_tables_format.py::flake8 [GOOD] >> test_ping.py::flake8 [GOOD] >> test_queue_attributes_validation.py::flake8 [GOOD] >> test_queue_counters.py::flake8 [GOOD] >> test_queue_tags.py::flake8 [GOOD] >> test_queues_managing.py::flake8 [GOOD] >> test_throttling.py::flake8 [GOOD] >> test_serializable.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/generic/provider/ut/pushdown/pushdown_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/hullds_sst_it_all_ut.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ut_helpers/libtx-replication-ut_helpers.a |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part9/py2_flake8 >> test.py::py2_flake8 [GOOD] |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part15/py2_flake8 >> test.py::py2_flake8 [GOOD] |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/generic/blobstorage_hullwritesst_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_rd_read_actor_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/runtime/ut/ut_helper.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/initializer/ut/ut_init.cpp |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stability/tool/flake8 >> __main__.py::flake8 [GOOD] |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ut_helpers/libpublic-lib-ut_helpers.a |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/flake8 >> test_throttling.py::flake8 [GOOD] >> __init__.py::flake8 [GOOD] >> matchers.py::flake8 [GOOD] >> requests_client.py::flake8 [GOOD] >> tables.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_ttl.py::flake8 [GOOD] |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/runtime/dq_output_channel_ut.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/libconnector-libcpp-ut_helpers.a |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/runtime/dq_arrow_helpers_ut.cpp |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/large_serializable/flake8 >> test_serializable.py::flake8 [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/library/ut/py3test >> integrations_test.py::test_read_jtest_with_one_result [GOOD] Test command err: /home/runner/.ya/build/build_root/43nv/00095a/ydb/tests/postgres_integrations/library/ut/test-results/py3test/ydb/tests/postgres_integrations/library/pytest_integration.py:26: PytestCollectionWarning: cannot collect test class 'TestCase' because it has a __init__ constructor (from: integrations_test.py) /home/runner/.ya/build/build_root/43nv/00095a/ydb/tests/postgres_integrations/library/ut/test-results/py3test/ydb/tests/postgres_integrations/library/pytest_integration.py:20: PytestCollectionWarning: cannot collect test class 'TestState' because it has a __init__ constructor (from: integrations_test.py) >> test.py::py2_flake8 [GOOD] |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/sqs/flake8 >> test_base.py::flake8 [GOOD] |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_write_actor_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/secret/ut/ut_secret.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_read_actor_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/scale_recommender_policy_ut.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/pq_async_io/libtests-fq-pq_async_io.a >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> select_positive_with_schema.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part10/py2_flake8 >> test.py::py2_flake8 [GOOD] |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/flake8 >> test_ttl.py::flake8 [GOOD] |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/storage_pool_info_ut.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/proto/libtools-stress_tool-proto.a |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/lib/libydb_device_test.a >> test.py::flake8 [GOOD] |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/hive_impl_ut.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/object_distribution_ut.cpp |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime_with_service_name.py::flake8 [GOOD] >> select_positive_with_service_name.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_clickhouse.py::flake8 [GOOD] >> test_greenplum.py::flake8 [GOOD] >> test_join.py::flake8 [GOOD] >> test_postgresql.py::flake8 [GOOD] >> test_ydb.py::flake8 [GOOD] |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/sequencer_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/hive_ut.cpp |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/flake8 >> test.py::flake8 [GOOD] |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part4/flake8 >> test.py::flake8 [GOOD] |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/oracle/flake8 >> test.py::flake8 [GOOD] >> 
test.py::py2_flake8 [GOOD] >> tstool.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_unknown_data_source.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/generic/analytics/flake8 >> test_ydb.py::flake8 [GOOD] >> compare.py::flake8 [GOOD] >> __init__.py::flake8 [GOOD] |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/py2_flake8 >> test.py::py2_flake8 [GOOD] |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/common/flake8 >> test_unknown_data_source.py::flake8 [GOOD] |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part1/flake8 >> test.py::flake8 [GOOD] |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/kqprun >> test.py::flake8 [GOOD] |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/result_compare/flake8 >> compare.py::flake8 [GOOD] |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tools/tstool/flake8 >> tstool.py::flake8 [GOOD] |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/flavours/flake8 >> __init__.py::flake8 [GOOD] |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/service/ut/ydb-core-sys_view-service-ut |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut >> kikimr_config.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_local_kmeans.cpp |59.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/recipe/solomon_recipe |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part9/flake8 >> test.py::flake8 [GOOD] |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_recompute_kmeans.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_secondary_index.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_sample_k.cpp |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/ut/flake8 >> kikimr_config.py::flake8 [GOOD] |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_prefix_kmeans.cpp >> test_common.py::flake8 [GOOD] >> test_yandex_cloud_mode.py::flake8 [GOOD] >> test_yandex_cloud_queue_counters.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> scenario.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_case.py::flake8 [GOOD] |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_reshuffle_kmeans.cpp |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_persqueue_v1.pb.{h, cc} |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part8/py2_flake8 >> test.py::py2_flake8 [GOOD] |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/flake8 >> test_yandex_cloud_queue_counters.py::flake8 [GOOD] >> integrations_test.py::flake8 [GOOD] |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_pdiskfit/lib/libblobstorage-ut_pdiskfit-lib.a |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit.cpp |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/join/flake8 >> test_case.py::flake8 [GOOD] |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/merge_split_common_table/std/flake8 >> test.py::flake8 [GOOD] |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut >> test.py::py2_flake8 [GOOD] >> base.py::flake8 [GOOD] >> data_correctness.py::flake8 [GOOD] >> data_migration_when_alter_ttl.py::flake8 [GOOD] >> ttl_delete_s3.py::flake8 [GOOD] >> ttl_portion_size.py::flake8 [GOOD] >> ttl_unavailable_s3.py::flake8 [GOOD] >> unstable_connection.py::flake8 [GOOD] |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |59.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/bin/solomon_emulator |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut >> common.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_rename.py::flake8 [GOOD] |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/py2_flake8 >> test.py::py2_flake8 [GOOD] |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/library/ut/flake8 >> integrations_test.py::flake8 [GOOD] |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/flake8 >> unstable_connection.py::flake8 [GOOD] |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut >> __main__.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_config_with_metadata.py::flake8 [GOOD] >> test_generate_dynamic_config.py::flake8 [GOOD] |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest_ut.cpp |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/rename/flake8 >> test_rename.py::flake8 [GOOD] |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/local_ydb/flake8 >> __main__.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::flake8 [GOOD] |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |59.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part6/ydb-tests-fq-yt-kqp_yt_file-part6 |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge >> test.py::py2_flake8 [GOOD] |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part8/flake8 >> test.py::flake8 [GOOD] |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/py2_flake8 >> test.py::py2_flake8 [GOOD] |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/flake8 >> test_generate_dynamic_config.py::flake8 [GOOD] |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part17/py2_flake8 >> test.py::py2_flake8 [GOOD] |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut >> test_workload.py::flake8 [GOOD] |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/lib/cmds/ut/flake8 >> test.py::flake8 [GOOD] |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part7/py2_flake8 >> test.py::py2_flake8 [GOOD] |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_range_ops.cpp |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut >> test.py::py2_flake8 [GOOD] >> test_clickbench.py::flake8 [GOOD] >> test_tpcds.py::flake8 [GOOD] >> test_tpch.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_data_cleanup.cpp |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/simple/libcore-cbo-simple.a |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut >> __main__.py::flake8 [GOOD] >> parser.py::flake8 [GOOD] |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/load/flake8 >> test_tpch.py::flake8 [GOOD] |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/kv/tests/flake8 >> test_workload.py::flake8 [GOOD] |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part3/flake8 >> test.py::flake8 [GOOD] |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/locks/range_treap_ut.cpp >> test.py::py2_flake8 [GOOD] |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sqlite3/libcontrib-libs-sqlite3.a |59.1%| 
[TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part8/py2_flake8 >> test.py::py2_flake8 [GOOD] |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut >> test.py::flake8 [GOOD] |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/docs/generator/flake8 >> parser.py::flake8 [GOOD] |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost >> test_postgres.py::flake8 [GOOD] |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |58.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part11/py2_flake8 >> test.py::py2_flake8 [GOOD] |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/attributes_md5_ut.cpp |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part11/flake8 >> test.py::flake8 [GOOD] |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/actors/helpers/ut/ydb-library-actors-helpers-ut |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/message_delay_stats_ut.cpp |58.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/metadata/ut/ydb-core-client-metadata-ut |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/test_events_writer.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/breakpad/libydb-library-breakpad.global.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/breakpad/libydb-library-breakpad.a |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/sha256_ut.cpp >> __main__.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> __init__.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] |58.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/postgresql/flake8 >> test_postgres.py::flake8 [GOOD] |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/infly_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/metering_ut.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/keys/libydb-library-keys.a |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/index_events_processor_ut.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.a >> __main__.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/constructor/libpy3python-import_tracing-constructor.global.a |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/main/libpython-runtime_py3-main.a |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/solomon/actors/ut/ut_helpers.cpp |58.9%| 
[AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/lib/libpy3python-import_tracing-lib.global.a |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/ydb_serializable/flake8 >> __main__.py::flake8 [GOOD] |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/solomon/py2_flake8 >> test.py::py2_flake8 [GOOD] >> test_pdisk_format_info.py::flake8 [GOOD] >> test_replication.py::flake8 [GOOD] >> test_self_heal.py::flake8 [GOOD] >> test_tablet_channel_migration.py::flake8 [GOOD] >> ydb-tests-tools-pq_read-test::import_test [GOOD] |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.global.a |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/test_meta/flake8 >> conftest.py::flake8 [GOOD] |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/solomon/actors/ut/dq_solomon_write_actor_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/time_cast/time_cast_ut.cpp |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/statistics_workload/flake8 >> __main__.py::flake8 [GOOD] |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/import_test/libpy3python-testing-import_test.global.a >> test_base.py::flake8 [GOOD] >> test_postgres.py::flake8 [GOOD] >> test_sql_logic.py::flake8 [GOOD] >> test_stream_query.py::flake8 [GOOD] |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/tools/yqlrun/lib/libtools-yqlrun-lib.a |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/simple_queue/tests/flake8 >> test_workload.py::flake8 [GOOD] |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/tools/yqlrun/http/libtools-yqlrun-http.a |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/tools/yqlrun/yqlrun |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/pq_read/test/import_test >> ydb-tests-tools-pq_read-test::import_test [GOOD] |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/liblibrary-cpp-lfalloc.a |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yql/tools/yqlrun/yqlrun.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tools/yql_facade_run/libessentials-tools-yql_facade_run.a >> conftest.py::flake8 [GOOD] >> test_auth_system_views.py::flake8 [GOOD] >> test_create_users.py::flake8 [GOOD] >> test_create_users_strict_acl_checks.py::flake8 [GOOD] >> test_db_counters.py::flake8 [GOOD] >> test_dynamic_tenants.py::flake8 [GOOD] >> test_publish_into_schemeboard_with_common_ssring.py::flake8 [GOOD] >> test_storage_config.py::flake8 [GOOD] >> test_system_views.py::flake8 [GOOD] >> test_tenants.py::flake8 [GOOD] >> test_user_administration.py::flake8 [GOOD] >> test_users_groups_with_acl.py::flake8 [GOOD] >> tablet_scheme_tests.py::flake8 [GOOD] |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_status_codes.pb.{h, cc} |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/flake8 >> test_tablet_channel_migration.py::flake8 [GOOD] |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/helpers/selfping_actor_ut.cpp |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/actors/interconnect/ut_fat/ydb-library-actors-interconnect-ut_fat |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/suite_tests/flake8 >> test_stream_query.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_commit.py::flake8 [GOOD] >> test_timeout.py::flake8 [GOOD] |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.{pb.h ... grpc.pb.h} |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pending_fetcher.pb.{h, cc} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.{pb.h ... grpc.pb.h} |59.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/common.pb.{h, cc} |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/interconnect/ut_fat/main.cpp |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/flake8 >> test_users_groups_with_acl.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part6/flake8 >> test.py::flake8 [GOOD] |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/ydb/flake8 >> test.py::flake8 [GOOD] |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/lwtrace.pb.{h, cc} |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/pq_read/test/flake8 >> test_timeout.py::flake8 [GOOD] |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pinger.pb.{h, cc} |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/actors/examples/01_ping_pong/example_01_ping_pong |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_tests/flake8 >> tablet_scheme_tests.py::flake8 [GOOD] |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.{pb.h ... grpc.pb.h} >> test_query_cache.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/examples/01_ping_pong/main.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/265f01382ea29d19a1f8016ef1_raw.auxcpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/long_tx_service/long_tx_service_ut.cpp |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.{pb.h ... grpc.pb.h} |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.global.a |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/tool |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.{pb.h ... grpc.pb.h} |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part13/py2_flake8 >> test.py::py2_flake8 [GOOD] |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.{pb.h ... grpc.pb.h} |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/transfer/tests/flake8 >> test_workload.py::flake8 [GOOD] |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/query_cache/flake8 >> test_query_cache.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> TFunctionsMetadataTest::Serialization [GOOD] |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.{pb.h ... grpc.pb.h} |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.{pb.h ... grpc.pb.h} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.{pb.h ... 
grpc.pb.h} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/feature_flags.{pb.h ... grpc.pb.h} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.{pb.h ... grpc.pb.h} |59.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_background_compaction.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/subscriber_ut.cpp >> test_alloc_default.py::flake8 [GOOD] >> test_dc_local.py::flake8 [GOOD] >> test_result_limits.py::flake8 [GOOD] >> test_scheduling.py::flake8 [GOOD] |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_reassign.cpp |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_pdiskfit/pdiskfit/pdiskfit |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/test/tool/perf/colons.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/data/kqp_read_null_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_compute_scheduler_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_fetcher_ut.cpp >> test.py::flake8 [GOOD] >> test_quoting.py::flake8 [GOOD] >> test_cms_erasure.py::flake8 [GOOD] >> test_cms_restart.py::flake8 [GOOD] >> test_cms_state_storage.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part14/flake8 >> test.py::flake8 [GOOD] |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_data_ut.cpp |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.{pb.h ... grpc.pb.h} |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/wrappers/s3_wrapper_ut.cpp |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.{pb.h ... grpc.pb.h} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.{pb.h ... grpc.pb.h} |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/test/tool/perf/main.cpp |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part19/flake8 >> test.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> test_schemeshard_limits.py::flake8 [GOOD] |59.0%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/tools/yasm/yasm |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/flake8 >> test_quoting.py::flake8 [GOOD] |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/mem_alloc/flake8 >> test_scheduling.py::flake8 [GOOD] |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/client/metadata/ut/unittest >> TFunctionsMetadataTest::Serialization [GOOD] |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.{pb.h ... grpc.pb.h} |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/datetime/libdatetime_udf.global.a |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/cms/flake8 >> utils.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_serverless.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] |58.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.{pb.h ... 
grpc.pb.h} |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/roaring/libroaring.global.a |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/knn/libknn_udf.global.a |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/compstrat/hulldb_compstrat_ut.cpp |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/flake8 >> test_schemeshard_limits.py::flake8 [GOOD] |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/driver/flake8 >> __main__.py::flake8 [GOOD] |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_rs.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/test/libvdisk-hulldb-test.a |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/flake8 >> test_serverless.py::flake8 [GOOD] |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/olap_workload/tests/flake8 >> test_workload.py::flake8 [GOOD] |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_topic/include/libclient-ydb_topic-include.a |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_pdiskfit/ut/main.cpp >> __init__.py::flake8 [GOOD] >> checker.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/config/validation/validators_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_stats_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_bscontroller/main.cpp |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/test/tool/perf/table-perf >> test.py::py2_flake8 [GOOD] >> test_restarts.py::flake8 [GOOD] |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_explain_ut.cpp |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_params_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_analyze_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_limits_ut.cpp >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> main.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part16/py2_flake8 >> test.py::py2_flake8 [GOOD] |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_types_ut.cpp |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/serializability/flake8 >> checker.py::flake8 [GOOD] |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/py2_flake8 >> test.py::py2_flake8 [GOOD] |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/flake8 >> test_restarts.py::flake8 [GOOD] |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/data.pb.{h, cc} >> test_crud.py::flake8 [GOOD] >> test_inserts.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_kv.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_fifo_messaging.py::flake8 [GOOD] >> test_generic_messaging.py::flake8 [GOOD] >> test_polling.py::flake8 [GOOD] |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_query_ut.cpp |58.9%| [PB] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/services/services.{pb.h ... grpc.pb.h} >> test_leader_start_inflight.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part13/flake8 >> test.py::flake8 [GOOD] |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ticket_parser_ut.cpp |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/apps/dstool/flake8 >> main.py::flake8 [GOOD] |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/mysql/flake8 >> test.py::flake8 [GOOD] |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/sql/flake8 >> test_kv.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |58.9%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/expr_nodes/dq_expr_nodes.{gen.h ... defs.inl.h} |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/statistics_workload/libpy3statistics_workload.global.a |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/flake8 >> test_polling.py::flake8 [GOOD] |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/flake8 >> test_leader_start_inflight.py::flake8 [GOOD] |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part7/flake8 >> test.py::flake8 [GOOD] |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/py2_flake8 >> test.py::py2_flake8 [GOOD] |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ut_split_merge_reboots.cpp >> __init__.py::py2_flake8 [GOOD] >> checker.py::py2_flake8 [GOOD] >> test_break.py::flake8 [GOOD] >> test_alter_ops.py::flake8 [GOOD] >> test_copy_ops.py::flake8 [GOOD] >> test_scheme_shard_operations.py::flake8 [GOOD] >> StaticValidator::Hosts [GOOD] >> StaticValidator::HostConfigs [GOOD] >> StaticValidator::DomainsConfig [GOOD] >> test_stability.py::flake8 [GOOD] >> __init__.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> kikimr_client.py::flake8 [GOOD] >> kikimr_config_client.py::flake8 [GOOD] >> kikimr_dynconfig_client.py::flake8 [GOOD] >> kikimr_http_client.py::flake8 [GOOD] >> kikimr_keyvalue_client.py::flake8 [GOOD] >> kikimr_monitoring.py::flake8 [GOOD] >> kikimr_scheme_client.py::flake8 [GOOD] >> hive_matchers.py::flake8 [GOOD] >> test_create_tablets.py::flake8 [GOOD] >> test_drain.py::flake8 [GOOD] >> test_kill_tablets.py::flake8 [GOOD] >> test_sql.py::flake8 [GOOD] |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part9/py2_flake8 >> test.py::py2_flake8 [GOOD] |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/validation/ut/ydb-core-config-validation-ut |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_stats.pb.{h, cc} |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |59.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_large.cpp >> test.py::flake8 [GOOD] |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_common.cpp |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/serializability/py2_flake8 >> checker.py::py2_flake8 [GOOD] |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/flake8 >> test_kill_tablets.py::flake8 [GOOD] |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} 
ydb/tests/functional/canonical/flake8 >> test_sql.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/dummy.{pb.h ... grpc.pb.h} |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/minidumps/flake8 >> test_break.py::flake8 [GOOD] |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stability/ydb/flake8 >> test_stability.py::flake8 [GOOD] |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/flake8 >> test_scheme_shard_operations.py::flake8 [GOOD] |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/clients/flake8 >> kikimr_scheme_client.py::flake8 [GOOD] |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/py2_flake8 >> test.py::py2_flake8 [GOOD] |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/db_pool.pb.{h, cc} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/fq.pb.{h, cc} >> test_crud.py::flake8 [GOOD] >> test_discovery.py::flake8 [GOOD] >> test_execute_scheme.py::flake8 [GOOD] >> test_indexes.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_isolation.py::flake8 [GOOD] >> test_public_api.py::flake8 [GOOD] >> test_read_table.py::flake8 [GOOD] >> test_session_grace_shutdown.py::flake8 [GOOD] >> test_session_pool.py::flake8 [GOOD] |59.0%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/expr_nodes/kqp_expr_nodes.{gen.h ... defs.inl.h} |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/static_validator/ut/unittest >> StaticValidator::DomainsConfig [GOOD] |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part16/flake8 >> test.py::flake8 [GOOD] |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query_stats.pb.{h, cc} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_coordination.pb.{h, cc} >> test_cp_ic.py::flake8 [GOOD] >> test_dispatch.py::flake8 [GOOD] >> test_retry.py::flake8 [GOOD] >> test_retry_high_rate.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test_quota_exhaustion.py::flake8 [GOOD] >> zip_bomb.py::flake8 [GOOD] |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_monitoring.pb.{h, cc} |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/tools/simple_json_diff/flake8 >> __main__.py::flake8 [GOOD] |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_operation.pb.{h, cc} |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/flake8 >> test_session_pool.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_join.py::flake8 [GOOD] |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/multi_plane/flake8 >> test_retry_high_rate.py::flake8 [GOOD] |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/actors/interconnect/ut/ydb-library-actors-interconnect-ut |58.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part6/py2_flake8 >> test.py::py2_flake8 [GOOD] |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serializable/flake8 >> test.py::flake8 [GOOD] >> ydb-tests-postgres_integrations-library-ut::import_test [GOOD] >> test_compatibility.py::flake8 [GOOD] >> test_followers.py::flake8 [GOOD] >> test_stress.py::flake8 [GOOD] |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/flake8 >> zip_bomb.py::flake8 
[GOOD] |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/generic/streaming/flake8 >> test_join.py::flake8 [GOOD] |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/ydb-core-blobstorage-vdisk-hulldb-compstrat-ut |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/issue_id.pb.{h, cc} |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/interconnect/ut/channel_scheduler_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/interconnect/ut/event_holder_pool_ut.cpp >> test.py::py2_flake8 [GOOD] |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/interconnect/ut/poller_actor_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/interconnect/ut/dynamic_proxy_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/interconnect/ut/interconnect_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/meta/meta_cache_ut.cpp >> test.py::py2_flake8 [GOOD] >> test.py::flake8 [GOOD] |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/interconnect/ut/outgoing_stream_ut.cpp |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/postgres_integrations/library/ut/import_test >> ydb-tests-postgres_integrations-library-ut::import_test [GOOD] |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/interconnect/ut/sticking_ut.cpp |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/py2_flake8 >> test.py::py2_flake8 [GOOD] |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/mvp/meta/libydb-mvp-meta.a |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/compatibility/flake8 >> test_stress.py::flake8 [GOOD] |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/interconnect/ut/large.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/dynamic_prototype/libcpp-protobuf-dynamic_prototype.a >> test.py::flake8 [GOOD] |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/trace/v1/trace.{pb.h ... 
grpc.pb.h} >> test_transform.py::flake8 [GOOD] >> TWeighedOrderingTest::SimpleSelectionTest [GOOD] >> TWeighedOrderingTest::WeighedSelectionTest [GOOD] >> ConfigProto::ForbidNewRequired [GOOD] >> TWeighedOrderingTest::WeighedOrderingTest [GOOD] >> test_clickbench.py::flake8 [GOOD] >> test_tpch.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/yql/libcpp-protobuf-yql.a |58.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part6/py2_flake8 >> test.py::py2_flake8 [GOOD] |58.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/flake8 >> test.py::flake8 [GOOD] |58.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part10/flake8 >> test.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_auditlog.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> http_client.py::flake8 [GOOD] >> query_results.py::flake8 [GOOD] |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncquorum_ut.cpp |58.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/ut_transform/flake8 >> test_transform.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_insert_restarts.py::flake8 [GOOD] >> __init__.py::py2_flake8 [GOOD] >> kikimr_client.py::py2_flake8 [GOOD] >> kikimr_config_client.py::py2_flake8 [GOOD] >> kikimr_dynconfig_client.py::py2_flake8 [GOOD] >> kikimr_http_client.py::py2_flake8 [GOOD] >> kikimr_keyvalue_client.py::py2_flake8 [GOOD] >> kikimr_monitoring.py::py2_flake8 [GOOD] >> kikimr_scheme_client.py::py2_flake8 [GOOD] |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/config/bsconfig_ut.cpp |58.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/ut/unittest >> ConfigProto::ForbidNewRequired [GOOD] |58.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/cluster_ordering/ut/unittest >> TWeighedOrderingTest::WeighedOrderingTest [GOOD] |58.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/medium/flake8 >> test_tpch.py::flake8 [GOOD] |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_object_storage_listing.cpp |58.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/py2_flake8 >> test.py::py2_flake8 [GOOD] |58.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/py2_flake8 >> test.py::py2_flake8 [GOOD] >> __main__.py::flake8 [GOOD] >> common.cpp::clang_format [GOOD] >> common.h::clang_format [GOOD] |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/rate_limiter/rate_limiter_ut.cpp |58.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/flake8 >> test_auditlog.py::flake8 [GOOD] |58.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/http_api_client/flake8 >> query_results.py::flake8 [GOOD] |58.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/flake8 >> test.py::flake8 [GOOD] |58.8%| [AR] 
{BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json/libjson_udf.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.global.a >> test_example.py::flake8 [GOOD] >> helpers.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_query.py::flake8 [GOOD] >> test_s3.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> Metrics::SeveralSubItems [GOOD] >> test.py::py2_flake8 [GOOD] >> SanitizeLable::SkipSingleBadSymbol [GOOD] >> SanitizeLable::SkipBadSymbols [GOOD] >> SanitizeLable::Truncate200 [GOOD] >> SanitizeLable::Empty [GOOD] >> Metrics::MoreThanFiveItems [GOOD] >> Metrics::CombineSubItems [GOOD] >> Metrics::EmptyIssuesList [GOOD] >> Metrics::OnlyOneItem [GOOD] >> Metrics::SeveralTopItems [GOOD] |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/osiris.cpp |58.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/restarts/flake8 >> test_insert_restarts.py::flake8 [GOOD] |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/lib/libcommon-math-lib.a |58.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/clients/py2_flake8 >> kikimr_scheme_client.py::py2_flake8 [GOOD] |58.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/federated_query/common/clang_format >> common.h::clang_format [GOOD] >> __main__.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_2_selects_limit.py::flake8 [GOOD] >> test_3_selects.py::flake8 [GOOD] >> test_bad_syntax.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_big_state.py::flake8 [GOOD] >> test_continue_mode.py::flake8 [GOOD] >> test_cpu_quota.py::flake8 [GOOD] >> test_delete_read_rules_after_abort_by_system.py::flake8 [GOOD] >> test_eval.py::flake8 [GOOD] >> test_invalid_consumer.py::flake8 [GOOD] >> test_kill_pq_bill.py::flake8 [GOOD] >> test_mem_alloc.py::flake8 [GOOD] >> test_metrics_cleanup.py::flake8 [GOOD] >> test_pq_read_write.py::flake8 [GOOD] >> test_public_metrics.py::flake8 [GOOD] >> test_read_rules_deletion.py::flake8 [GOOD] >> test_recovery.py::flake8 [GOOD] >> test_recovery_match_recognize.py::flake8 [GOOD] >> test_recovery_mz.py::flake8 [GOOD] |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.global.a >> test_restart_query.py::flake8 [GOOD] |58.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/example/flake8 >> test_example.py::flake8 [GOOD] |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/lib/libcommon-ip_base-lib.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.global.a |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/ydb_serializable/replay/flake8 >> __main__.py::flake8 [GOOD] |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/sql/lib/flake8 >> test_s3.py::flake8 [GOOD] |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.global.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.global.a >> test_row_dispatcher.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_select_1.py::flake8 [GOOD] >> test_select_limit.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test_select_limit_db_id.py::flake8 [GOOD] >> test_select_timings.py::flake8 [GOOD] >> test_stop.py::flake8 [GOOD] >> test_watermarks.py::flake8 [GOOD] >> test_yds_bindings.py::flake8 [GOOD] >> 
test_yq_streaming.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_update_script_tables.py::flake8 [GOOD] |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/libip_udf.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.global.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.global.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/lib/libcommon-url_base-lib.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.global.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/static/libcommon-stat-static.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/lib/libcommon-unicode_base-lib.a |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part0/py2_flake8 >> test.py::py2_flake8 [GOOD] |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_error_codes.pb.{h, cc} |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part12/py2_flake8 >> test.py::py2_flake8 [GOOD] |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.global.a |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_file/part5/flake8 >> test.py::flake8 [GOOD] |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.{pb.h ... grpc.pb.h} >> test_liveness_wardens.py::flake8 [GOOD] >> __init__.py::flake8 [GOOD] >> base.py::flake8 [GOOD] >> datashard.py::flake8 [GOOD] >> disk.py::flake8 [GOOD] >> factories.py::flake8 [GOOD] >> hive.py::flake8 [GOOD] >> logs.py::flake8 [GOOD] >> schemeshard.py::flake8 [GOOD] |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/static/libcommon-topfreq-static.a |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/kqprun/recipe/flake8 >> __main__.py::flake8 [GOOD] |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/token_accessor.pb.{h, cc} |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/metrics/ut/unittest >> Metrics::SeveralTopItems [GOOD] |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.{pb.h ... grpc.pb.h} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_proxy.pb.{h, cc} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_storage.pb.{h, cc} |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/py2_flake8 >> test.py::py2_flake8 [GOOD] |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.{pb.h ... grpc.pb.h} |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/flake8 >> test_yq_streaming.py::flake8 [GOOD] |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.{pb.h ... 
grpc.pb.h} |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.global.a |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/compute.pb.{h, cc} >> test_workload.py::flake8 [GOOD] |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/yql_types.pb.{h, cc} |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/wardens/flake8 >> schemeshard.py::flake8 [GOOD] |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/wardens/flake8 >> test_liveness_wardens.py::flake8 [GOOD] |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/flake8 >> test_update_script_tables.py::flake8 [GOOD] |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.{pb.h ... grpc.pb.h} |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8 >> test.py::flake8 [GOOD] |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources.{pb.h ... grpc.pb.h} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.{pb.h ... grpc.pb.h} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/resource/v1/resource.{pb.h ... grpc.pb.h} |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.{pb.h ... grpc.pb.h} |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.global.a >> ydb-dstool::import_test [GOOD] >> test_kqprun_recipe.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.{pb.h ... grpc.pb.h} |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_pdiskfit/ut/ydb-core-blobstorage-ut_pdiskfit-ut |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydbd/ydbd |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_table.cpp |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/oltp_workload/tests/flake8 >> test_workload.py::flake8 [GOOD] |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ext_index/ut/ut_ext_index.cpp |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/kqprun/tests/flake8 >> test_kqprun_recipe.py::flake8 [GOOD] |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/apps/dstool/import_test >> ydb-dstool::import_test [GOOD] |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ut_export_reboots_s3.cpp |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/login.pb.{h, cc} |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/meta/ut/ydb-mvp-meta-ut |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.{pb.h ... 
grpc.pb.h} |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/flake8 >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_ydb_backup.py::flake8 [GOOD] >> test_ydb_flame_graph.py::flake8 [GOOD] >> test_ydb_impex.py::flake8 [GOOD] >> test_ydb_scheme.py::flake8 [GOOD] >> test_ydb_scripting.py::flake8 [GOOD] >> test_ydb_sql.py::flake8 [GOOD] >> test_ydb_table.py::flake8 [GOOD] |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.{pb.h ... grpc.pb.h} |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/tests/flake8 >> test.py::flake8 [GOOD] |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.{pb.h ... grpc.pb.h} |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/log/tests/flake8 >> test_workload.py::flake8 [GOOD] |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.{pb.h ... grpc.pb.h} >> test_tpcds.py::flake8 [GOOD] |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/flake8 >> test_ydb_table.py::flake8 [GOOD] |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.{pb.h ... grpc.pb.h} |59.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/statistics_workload/statistics_workload |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut >> test_bulkupserts_tpch.py::flake8 [GOOD] >> test_insert_delete_duplicate_records.py::flake8 [GOOD] >> test_insertinto_selectfrom.py::flake8 [GOOD] >> test_tiering.py::flake8 [GOOD] >> test_workload_manager.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_http_api.py::flake8 [GOOD] |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.{pb.h ... grpc.pb.h} |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/sql/large/flake8 >> test_workload_manager.py::flake8 [GOOD] |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.{pb.h ... grpc.pb.h} |59.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/ydb-tests-example |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/issue_id.{pb.h ... grpc.pb.h} |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/http_api/flake8 >> test_http_api.py::flake8 [GOOD] |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup |59.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/large/flake8 >> test_tpcds.py::flake8 [GOOD] |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/persqueue_error_codes_v1.pb.{h, cc} |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_stats/ut_stats.cpp |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.{pb.h ... grpc.pb.h} |59.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_exchange.cpp |59.0%| [PR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/expr_nodes/yql_res_expr_nodes.{gen.h ... 
defs.inl.h} |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/tsserver/tsserver |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |58.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/ut/protos/interconnect_test.pb.cc |58.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.{pb.h ... grpc.pb.h} |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_cms.pb.{h, cc} |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/tsserver/main.cpp |58.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/run_tests/run_tests |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_update_ut.cpp |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/scheme/ut_pg/scheme_tablecell_pg_ut.cpp >> conftest.py::flake8 [GOOD] >> s3_helpers.py::flake8 [GOOD] >> test_bindings_0.py::flake8 [GOOD] >> test_bindings_1.py::flake8 [GOOD] >> test_compressions.py::flake8 [GOOD] >> test_early_finish.py::flake8 [GOOD] >> test_empty.py::flake8 [GOOD] >> test_explicit_partitioning_0.py::flake8 [GOOD] >> test_explicit_partitioning_1.py::flake8 [GOOD] >> test_format_setting.py::flake8 [GOOD] >> test_formats.py::flake8 [GOOD] >> test_inflight.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_public_metrics.py::flake8 [GOOD] >> test_push_down.py::flake8 [GOOD] >> test_s3_0.py::flake8 [GOOD] >> test_s3_1.py::flake8 [GOOD] >> test_size_limit.py::flake8 [GOOD] >> test_statistics.py::flake8 [GOOD] >> test_streaming_join.py::flake8 [GOOD] >> test_test_connection.py::flake8 [GOOD] >> test_ydb_over_fq.py::flake8 [GOOD] >> test_yq_v2.py::flake8 [GOOD] |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_delete_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/kqp_generic_provider_ut.cpp |58.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.{pb.h ... 
grpc.pb.h} |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/run_tests/libpy3benchmarks-runner-run_tests.global.a |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/config/ut/ydb-services-config-ut |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_mvcc_ut.cpp |58.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/flake8 >> test_yq_v2.py::flake8 [GOOD] |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_tx_ut.cpp |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_ut.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_locks_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_types_arrow_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_ftol/dsproxy_fault_tolerance_ut.cpp |58.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part8/ydb-tests-fq-yt-kqp_yt_file-part8 |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/scheme/ut_pg/ydb-core-scheme-ut_pg |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/database/ut/ut_database.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/address_classifier_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_arrow_in_channels_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/event_priority_queue_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/page_map_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/interval_set_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/concurrent_rw_hash_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/hazard_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/fast_tls_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/cache_cache_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/hyperlog_counter_ut.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_vdisk/lib/libblobstorage-ut_vdisk-lib.a |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/fragmented_buffer_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/log_priority_mute_checker_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/intrusive_heap_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/lf_stack_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/operation_queue_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/intrusive_stack_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/operation_queue_priority_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ut_ycsb.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/lz4_data_generator_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/stlog_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/queue_oneone_inplace_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/ulid_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/btree_cow_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/util/queue_inplace_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/actors/spilling/spilling_file_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/wildcard_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/simple_cache_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/ui64id_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/token_bucket_ut.cpp |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/circular_queue_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/intrusive_fixed_hash_set_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/bits_ut.cpp |58.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/dq_effects.pb.{h, cc} |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/cache_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/btree_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/ingress/blobstorage_ingress_matrix_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/ingress/blobstorage_ingress_ut.cpp |58.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.{pb.h ... grpc.pb.h} |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_state_load_plan.pb.{h, cc} |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.{pb.h ... grpc.pb.h} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.{pb.h ... grpc.pb.h} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_object_storage.pb.{h, cc} |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/interconnect.pb.{h, cc} |58.7%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/runtime_feature_flags.h |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/monitoring/mon_proto.pb.{h, cc} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/pathid.{pb.h ... grpc.pb.h} |58.8%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/udf_resolver.pb.{h, cc} |58.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.{pb.h ... grpc.pb.h} |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.{pb.h ... grpc.pb.h} |58.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part17/ydb-tests-fq-yt-kqp_yt_file-part17 |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/actors/spilling/ut/ydb-library-yql-dq-actors-spilling-ut |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_basic_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/address_classification/net_classifier_ut.cpp |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc.{pb.h ... 
grpc.pb.h} |58.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_id_dict_ut.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/util/ut/ydb-core-util-ut |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/google/benchmark/librestricted-google-benchmark.a |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/incrhuge/ut/incrhuge_log_merger_ut.cpp |58.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/ut_transform/ydb-library-yaml_config-ut_transform |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/util_pool_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/blobsan/main.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/util_string_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ut_extsubdomain_reboots.cpp |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/long_tx_service.{pb.h ... grpc.pb.h} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.{pb.h ... grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kesus.{pb.h ... grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters_aggregator.{pb.h ... grpc.pb.h} |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/actors/examples/02_discovery/example_02_discovery |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_api.pb.{h, cc} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/fields.pb.{h, cc} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_persqueue_cluster_discovery.pb.{h, cc} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/yql_mount.pb.{h, cc} |58.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp >> run_tests::import_test [GOOD] |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/examples/02_discovery/lookup.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/examples/02_discovery/publish.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/examples/02_discovery/replica.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.{pb.h ... 
grpc.pb.h} |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/examples/02_discovery/endpoint.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/examples/02_discovery/protocol.pb.cc |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/examples/02_discovery/main.cpp |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_config.pb.{h, cc} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/gateways.pb.{h, cc} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_dynamic_config.pb.{h, cc} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_replication.pb.{h, cc} |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_init.cpp |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/field_transformation.pb.{h, cc} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_scheme.pb.{h, cc} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_import.pb.{h, cc} |58.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/run_tests/import_test >> run_tests::import_test [GOOD] |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/retry_config.pb.{h, cc} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_table.pb.{h, cc} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_discovery_v1.{pb.h ... grpc.pb.h} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/datastreams.pb.{h, cc} |58.7%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/970aa707caacfb3ae8e3b5e2ab.yasm |58.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/cdf2a00c973a8918356f2505f1.auxcpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/ut/utils_ut.cpp |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statestorage.{pb.h ... grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/serverless_proxy_config.{pb.h ... grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/connector.{pb.h ... grpc.pb.h} |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/file_storage.pb.{h, cc} |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/ingress/ut/ydb-core-blobstorage-vdisk-ingress-ut |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/blobsan/blobsan |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_order.cpp |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.{pb.h ... grpc.pb.h} |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_discovery.pb.{h, cc} |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/local_partition_reader_ut.cpp |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.{pb.h ... 
grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/sensitive.pb.{h, cc} |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut_util/ydb-core-tablet_flat-ut_util |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/rate_limiter.pb.{h, cc} |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_erase_rows.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/actors/core/ut_fat/ydb-library-actors-core-ut_fat |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/ut_fat/actor_benchmark.cpp >> ydb-tests-functional-sqs-messaging::import_test [GOOD] |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/common/http_gateway/ut/ydb-library-yql-providers-common-http_gateway-ut |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume/ut_bsvolume.cpp |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |58.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/import_test >> ydb-tests-functional-sqs-messaging::import_test [GOOD] |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tablet_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/dq/service_node/main.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/control/immediate_control_board_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/dst_creator_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/control/immediate_control_board_actor_ut.cpp |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/ut/grpc/libgrpc_streaming-ut-grpc.a |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_streaming/grpc_streaming_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_scan.cpp |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/auth_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/monitoring_ut.cpp |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/common/ut/ydb-core-blobstorage-vdisk-common-ut |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/circlebufresize_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/ut_helpers.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/balancing.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/actor_client_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/circlebuf_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_resource_tree_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/idx_test/ydb_index_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/datashard/datashard_ut_common_pq.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/memusage_ut.cpp >> functional-sqs-merge_split_common_table-fifo::import_test [GOOD] |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_outofspace_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/circlebufstream_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_config_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_syncneighbors_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/ut_helpers.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_lsnmngr_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/common/vdisk_pdisk_error_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_volatile.cpp |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ut/ydb-core-security-ut |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/common/http_gateway/yql_aws_signature_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/yt/libdq-actors-yt.a |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpointing/ut/checkpoint_coordinator_ut.cpp |58.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/merge_split_common_table/fifo/import_test >> functional-sqs-merge_split_common_table-fifo::import_test [GOOD] |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/common/http_gateway/yql_dns_gateway_ut.cpp |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/metrics/libproviders-dq-metrics.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gbenchmark/libcpp-testing-gbenchmark.a |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/replication_huge.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/replication.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/ut_user_attributes_reboots.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/rm_service/kqp_rm_ut.cpp >> ydb-tests-functional-serializable::import_test [GOOD] |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_fat/dsproxy_ut.cpp |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/global_worker_manager/libproviders-dq-global_worker_manager.a |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut_auth/ydb-core-base-ut_auth >> ydb-tests-olap-ttl_tiering::import_test [GOOD] |58.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serializable/import_test >> ydb-tests-functional-serializable::import_test [GOOD] |58.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console.{pb.h ... 
grpc.pb.h} |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhuge_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/ut_vector_index_build_reboots.cpp |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/blobstorage_hullhugeheap_ctx_ut.cpp |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/huge/top_ut.cpp |58.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/ttl_tiering/import_test >> ydb-tests-olap-ttl_tiering::import_test [GOOD] |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut_trace.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/ut_fat/blobstorage_node_warden_ut_fat.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes/ut_user_attributes.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/sentinel_ut_unstable.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing >> ydb-public-tools-lib-cmds-ut::import_test [GOOD] >> ydb-tests-functional-ttl::import_test [GOOD] |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_proxy.pb.{h, cc} >> ydb-tests-functional-tenants::import_test [GOOD] |57.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/lib/cmds/ut/import_test >> ydb-public-tools-lib-cmds-ut::import_test [GOOD] |57.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/quotas_manager.pb.{h, cc} |57.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |57.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ut_cdc_stream_reboots.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/aggregations_ut.cpp |58.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ttl/import_test >> ydb-tests-functional-ttl::import_test [GOOD] |58.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/import_test >> ydb-tests-functional-tenants::import_test [GOOD] |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_self.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_clock_pro_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_screen.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/formats/arrow/ut/ut_arrow.cpp |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dq/service_node/service_node |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/locks_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/optimizer_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/json_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/main.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_replay.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table_reboots/ut_external_table_reboots.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/clickbench_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_proccessor.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_column_filter.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/statistics_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_compiler.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/tiering_ut.cpp >> statistics_workload::import_test [GOOD] |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/sparsed_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_common.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/write_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/sys_view_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_columnshard.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_counters.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_labeled.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_kqp.cpp |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_datashard.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_datashard.cpp |58.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/statistics_workload/import_test >> statistics_workload::import_test [GOOD] |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/populator_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_auditsettings/ut_auditsettings.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_hash.cpp |58.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/ut/1be310ddb8a4007766dca2cc77.yasm |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_part_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_dictionary.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/blobstorage_node_warden_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_cxx_database_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/formats/arrow/ut/ut_reader.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/formats/arrow/ut/ut_program_step.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_gclogic_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_row_versions_ut.cpp |58.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_leases_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_database_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_sausage.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_range_cache_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup/ut_backup.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |58.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part15/ydb-tests-fq-yt-kqp_yt_file-part15 |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_switchable_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_handle_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_iter_charge.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_s3fifo_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/ut/b5139bcb976066539a8304ce75.auxcpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/flat_test_db.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_bloom.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_charge.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_new_schemecache_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_nodes.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_new_schemecache_ut.cpp |58.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/tests/ydb-tests-stress-transfer-tests |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_comp_gen.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_pages.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_scheme.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction_multi.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_other.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_iterator.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_datetime.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_data_cleanup.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_forward.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_decimal.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_iface.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_proto.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_redo.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_rename_table_column.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part_multi.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/bind_queue_ut.cpp >> ydb-tests-example::import_test [GOOD] |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/compression_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/race.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/fq/libs/common/rows_proto_splitter_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/assign_tx_id_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_stats_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yaml_config/tools/dump/main.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_stat.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/entity_id_ut.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/cache_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice_loader.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/benchmark/b_part.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/slow/autopartitioning_ut.cpp |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/libsrc-client-federated_topic.a |58.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/example/import_test >> ydb-tests-example::import_test [GOOD] |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/mv_object_map_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_versions.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/util_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/grouper_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/group_mapper_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/slow/pq_ut.cpp |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/impl/libclient-federated_topic-impl.a |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_memtable.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/dictionary_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_background_cleaning/ut_background_cleaning.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/indexes_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/federated_topic/ut/basic_usage_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/decimal_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/delete_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/datatime64_ut.cpp |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/olap/helpers/libut-olap-helpers.a |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/common_ut.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_status_codes.pb.{h, cc} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_export.pb.{h, cc} |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/olap/combinatory/libut-olap-combinatory.a |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/clickhouse.pb.{h, cc} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |58.0%| [PB] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/export.{pb.h ... grpc.pb.h} |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.{pb.h ... grpc.pb.h} |58.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/library/compatibility/downloader/libpy3library-compatibility-downloader.global.a |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_formats.pb.{h, cc} >> ydb-tests-fq-multi_plane::import_test [GOOD] |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_common.pb.{h, cc} |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_datashard.{pb.h ... grpc.pb.h} |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/compatibility/downloader/5ffbb4322b471825685b8f467f_raw.auxcpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/compatibility/downloader/103a0b2e3839ebab97c29ba02e_raw.auxcpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdata_ut.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.{pb.h ... grpc.pb.h} |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_check_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_utils_ut.cpp |58.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/multi_plane/import_test >> ydb-tests-fq-multi_plane::import_test [GOOD] |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.{pb.h ... grpc.pb.h} |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogdsk_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/dq/worker_node/main.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/ut_helpers.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmem_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_maintenance_api_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmsgimpl_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_tenants_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/downtime_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogmsgwriter_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/blobstorage_synclogkeeper_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/synclog/codecs_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cluster_info_ut.cpp |58.0%| [PR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/{c79bf977cdb0ffe390211f5e3d.yasm ... ro_363ad6a7a0ee9cfe4ed6517f8f.rodata} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/test_connection.pb.{h, cc} |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay/ydb_query_replay |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.{pb.h ... grpc.pb.h} |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config.{pb.h ... 
grpc.pb.h} |57.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/kesus_quoter_ut.cpp |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/ut_helpers.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_replay.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3of4.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3dc.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/deadlines.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/counting_events.cpp >> ydb-tests-olap-scenario::import_test [GOOD] |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/assimilation.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/gc.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/main.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/json_change_record_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/encryption.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/decommit_3dc.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/discover.cpp |57.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/scenario/import_test >> ydb-tests-olap-scenario::import_test [GOOD] |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/double_indexed_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/ds_proxy_lwtrace.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator_ext_blobs.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/extra_block_checks.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/incorrect_queries.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/index_restore_get.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/group_reconfiguration.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/gc_quorum_3dc.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/get.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/gen_restarts.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_compiler.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/resource_broker_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/shred.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/multiget.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/blobstorage/ut_blobstorage/recovery.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/monitoring.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/self_heal.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/sanitize_groups.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub_fast.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/patch.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_vector_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/pipe_tracker_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_prefixed_vector_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_merge_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/validation.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/sync.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/snapshots.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/yt/actors/ut/yql_yt_lookup_actor_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sort_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_extract_predicate_unpack_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export.cpp |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut >> ydb-library-yaml_config-ut_transform::import_test [GOOD] |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/ut_helpers.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_named_expressions_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_returning_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_agg_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ranges_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sqlin_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_kv_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/bootstrapper_ut.cpp |57.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/ut_transform/import_test >> ydb-library-yaml_config-ut_transform::import_test [GOOD] |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/main.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_not_null_ut.cpp |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_malfunction.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ne_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_ut.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/protobuf_udf/libessentials-minikql-protobuf_udf.a |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_metrics_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipe_ut.cpp >> ydb-tests-functional-tpc-medium::import_test [GOOD] |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_resolver_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_req_blockbs_ut.cpp |57.9%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/docs/generator/libpy3olap-docs-generator.global.a |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_aggregator_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipecache_ut.cpp |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/table_writer_ut.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.global.a |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_reboots/ut_reboots.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/codegen/no_llvm/libminikql-codegen-no_llvm.a |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/block_race.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/space_check.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/get_block.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minstep.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceshard/ut_helpers.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/defrag.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceshard/ut_sequenceshard.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/mon_reregister_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/acceleration.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/huge_migration_ut.cpp |58.1%| [BN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stability/tool/ydb_cli |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/stop_pdisk.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_vdisk/vdisk_test.cpp |58.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/medium/import_test >> ydb-tests-functional-tpc-medium::import_test [GOOD] |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/checkpoint_coordinator.pb.{h, cc} |58.1%| [LD] {BAZEL_DOWNLOAD} $(B)/tools/rescompressor/rescompressor |58.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.{pb.h ... 
grpc.pb.h} |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/library/compatibility/downloader/downloader |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/synclog/ut/ydb-core-blobstorage-vdisk-synclog-ut |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_double_indexed/ydb-core-tx-scheme_board-ut_double_indexed |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jmespath/py3/libpy3python-jmespath-py3.a |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |57.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/compatibility/downloader/038c984b551bacc56e4d50d8dd_raw.auxcpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/s3transfer/py3/libpy3python-s3transfer-py3.global.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/s3transfer/py3/libpy3python-s3transfer-py3.a |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/boto3/py3/libpy3python-boto3-py3.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/libpy3tests-tools-ydb_serializable.global.a |57.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/json/ut/ydb-core-viewer-json-ut |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jmespath/py3/libpy3python-jmespath-py3.global.a |57.7%| [BN] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/olap_workload |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_replication.cpp |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/dread_cache_service/ut/caching_proxy_ut.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.global.a |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_query_perf_ut.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/idx_test/libpublic-lib-idx_test.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/terminate_policy/libudf-service-terminate_policy.global.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.global.a |57.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain/ut_extsubdomain.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain/ut_subdomain.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/sourceid_ut.cpp |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/457f0bfdb7e6ec7f5dfbf7d44f.auxcpp |57.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/replication/controller/stream_creator_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/9491890bedc20286ea9b7bfbd4.auxcpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/quota_tracker_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_sequence_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/health_check/health_check_ut.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/yql_testlib/libydb-core-yql_testlib.a |58.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/746459528cbcd4301762245782.yasm >> ydb-tests-functional-postgresql::import_test [GOOD] |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/make_config.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/internals_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/metering_sink_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partitiongraph_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/fetch_request_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/counters_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/microseconds_sliding_window_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/list_all_topics_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqrb_describes_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_mock.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/user_info_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/table_description_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/utils_ut.cpp |57.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/postgresql/import_test >> ydb-tests-functional-postgresql::import_test [GOOD] |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/type_codecs_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/balancing_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/compression_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/commitoffset_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minikql.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_request_reporting_ut.cpp |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/ut/ydb-core-control-ut |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_quorum_tracker_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_counters_ut.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/botocore/py3/libpy3python-botocore-py3.global.a |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/ydb_convert_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pq_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_patch_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_ut.cpp |58.1%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/ut/92f45888db1a579e0ac6c21355.yasm >> Json::BasicRendering [GOOD] |58.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/fq/libs/compute/common/ut/utils_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_put_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/common/ut/config_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/ut/cccd51aecdb81b6d2189d001fc.auxcpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/no_llvm/libcodec-codegen-no_llvm.a |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/board_subscriber_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless/ut_serverless.cpp |58.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/ydb_serializable |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/cache_eviction_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/json_proto_conversion_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/partition_end_watcher_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_restart.cpp |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/no_llvm/libyt-comp_nodes-no_llvm.a |58.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/831059a5eed8c4171c5826ec58.yasm |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_filestore_reboots/ut_filestore_reboots.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/services_common.pb.{h, cc} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/nodes_manager.pb.{h, cc} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/config.pb.{h, cc} |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_chooser_ut.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/yt/actors/ut/ydb-library-yql-providers-yt-actors-ut |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_clickhouse_internal.pb.{h, cc} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/protos/event.pb.{h, cc} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/checksum.{pb.h ... grpc.pb.h} |58.0%| [PR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/yql_expr_nodes.{gen.h ... defs.inl.h} |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_common.pb.{h, cc} |58.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/json/ut/unittest >> Json::BasicRendering [GOOD] |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/audit.pb.{h, cc} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/connector.pb.{h, cc} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.{pb.h ... 
grpc.pb.h} |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/testlib/actors/test_runtime_ut.cpp >> ydb-tests-fq-http_api::import_test [GOOD] |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/libtopic-ut-ut_utils.a |58.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/http_api/import_test >> ydb-tests-fq-http_api::import_test [GOOD] |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_datastreams_v1.{pb.h ... grpc.pb.h} >> ydb-tests-library-ut::import_test [GOOD] |57.9%| [BN] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/cfg |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/type_info.{pb.h ... grpc.pb.h} |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.{pb.h ... grpc.pb.h} |58.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/s3_path_style/ydb-tests-functional-backup-s3_path_style |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.{pb.h ... grpc.pb.h} |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/actors/core/harmonizer/ut/ydb-library-actors-core-harmonizer-ut |58.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/ut/import_test >> ydb-tests-library-ut::import_test [GOOD] |58.0%| [BN] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/statistics_workload |58.0%| [BN] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/nemesis |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/ut/ydb_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/harmonizer/ut/cpu_count_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/harmonizer/ut/shared_info_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/harmonizer/ut/history_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut_helpers.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/core/harmonizer/ut/harmonizer_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup/ut_continuous_backup.cpp |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_topic.pb.{h, cc} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.{pb.h ... 
grpc.pb.h} |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/testlib/actors/ut/ydb-core-testlib-actors-ut |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_prepare_scheme.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_prepare.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_drop.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/b9c5a7cd74c2c9a74f760af767.auxcpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_volatile_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_tracker_ut.cpp |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_bootstrapped_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_activity_ut.cpp |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/test_interconnect_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/test_protocols_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_data_erasure/ut_data_erasure.cpp |58.0%| [AS] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/7c56ed0639c0a21b31c888cd2a.yasm |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_trace.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/trace_ut.cpp |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_run_query.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/cmd_run_bench.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/tpch/lib/libtests-tpch-lib.global.a |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/commands.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/tpch/lib/libtests-tpch-lib.a |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tests/tpch/main.cpp |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/snapshot.pb.{h, cc} |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/row_dispatcher.pb.{h, cc} |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.{pb.h ... grpc.pb.h} |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.{pb.h ... grpc.pb.h} |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.{pb.h ... 
grpc.pb.h} >> ydb-tests-stress-transfer-tests::import_test [GOOD] |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.{pb.h ... grpc.pb.h} |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence_reboots/ut_sequence_reboots.cpp |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |57.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.{pb.h ... grpc.pb.h} |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |57.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/transfer/tests/import_test >> ydb-tests-stress-transfer-tests::import_test [GOOD] |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.{pb.h ... grpc.pb.h} |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/s3/object_listers/ut/ydb-library-yql-providers-s3-object_listers-ut |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/encrypted_storage_ut.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/s3/object_listers/yql_s3_path_ut.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream/ut_cdc_stream.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/validators/validator_bootstrap_ut.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/validators/registry_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/validators/validator_nameservice_ut.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/ssa.pb.{h, cc} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/aclib.pb.{h, cc} |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |57.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/docs/generator/generator |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/restart_pdisk.cpp |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/keyvalue/grpc_service_ut.cpp |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board.{pb.h ... grpc.pb.h} |57.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_hive.{pb.h ... grpc.pb.h} |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/schemereq_ut.cpp |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/accessor.pb.{h, cc} |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/driver_lib/version/ut/version_ut.cpp |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.{pb.h ... grpc.pb.h} |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.{pb.h ... 
grpc.pb.h} |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/legacy_protobuf/protos/metric_meta.pb.{h, cc} |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_view.pb.{h, cc} |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/sdk_sessions_pool_ut/sdk_sessions_pool_ut.cpp |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/version/ut/ydb-core-driver_lib-version-ut |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/validators/ut/ydb-core-cms-console-validators-ut |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/libpy3ydb_recipe.global.a |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/actors.pb.{h, cc} |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.{pb.h ... grpc.pb.h} |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus.{pb.h ... grpc.pb.h} |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/tests/tpch/tpch |57.7%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/provider/yql_kikimr_expr_nodes.{gen.h ... defs.inl.h} |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_integrity_trails.{pb.h ... grpc.pb.h} |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.{pb.h ... grpc.pb.h} |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/read_actors_factory.pb.{h, cc} |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |57.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.{pb.h ... grpc.pb.h} |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/test/tool/surg/main.cpp |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/barriers/barriers_tree_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_ut.cpp |57.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.{pb.h ... 
grpc.pb.h} |57.6%| [BN] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/tool/simple_queue |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_ut.cpp |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_flip_join_ut.cpp |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/dbg/libcpp-lfalloc-dbg.a |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/ut/common/libworkload_service-ut-common.a |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |57.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/resource_manager.pb.{h, cc} |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_index_lookup_join_ut.cpp |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/operations.{pb.h ... grpc.pb.h} |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/fq_config.pb.{h, cc} |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.{pb.h ... grpc.pb.h} |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |57.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.{pb.h ... grpc.pb.h} |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection/ut_backup_collection.cpp |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.{pb.h ... grpc.pb.h} |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/donor.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_acl_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_constraints_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication_reboots/ut_replication_reboots.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ut_data_erasure_reboots.cpp |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/test/tool/surg/surg |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_config.{pb.h ... grpc.pb.h} |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/s3_settings.{pb.h ... 
grpc.pb.h} |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_federation_discovery.pb.{h, cc} |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/oidc_proxy/bin/main.cpp |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.{pb.h ... grpc.pb.h} |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp |57.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.{pb.h ... grpc.pb.h} |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/ydb-core-blobstorage-vdisk-hulldb-barriers-ut |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_vdisk2/huge.cpp |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_hullreplwritesst_ut.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/repl/blobstorage_replrecoverymachine_ut.cpp |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore.cpp |57.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/ut/database_resolver_ut.cpp |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |57.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/basic_example/public-sdk-cpp-tests-integration-basic_example |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_readbatch_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/blobstorage_hullcompactdeferredqueue_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hullop/hullop_delayedresp_ut.cpp |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_document_api_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_service_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_scripts_ut.cpp |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp |59.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/liblibrary-cpp-build_info.a |59.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/svnversion/liblibrary-cpp-svnversion.a |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/scheme/scheme_ranges_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/scheme/scheme_borders_ut.cpp >> generator::import_test [GOOD] >> ydb-tests-fq-yds::import_test [GOOD] |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/scheme/scheme_tablecell_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/scheme/scheme_types_proto_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/external_sources/external_data_source_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/object_storage_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sharding/ut/ut_sharding.cpp |60.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/docs/generator/import_test >> generator::import_test [GOOD] |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/mvp/meta/bin/main.cpp |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/oidc_proxy/bin/mvp_oidc_proxy |60.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/import_test >> ydb-tests-fq-yds::import_test [GOOD] |61.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |61.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/tools/fqrun/fqrun.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/util/btree_benchmark/main.cpp |61.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fqrun/src/libtools-fqrun-src.a |62.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/actors/testlib/ut/ydb-library-actors-testlib-ut |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_restore_scan.cpp |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_upload_rows.cpp >> ydb_serializable::import_test [GOOD] |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/testlib/decorator_ut.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/query_stats/query_stats_ut.cpp |63.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/actors/interconnect/ut/protos/libinterconnect-ut-protos.a |64.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/ydb_serializable/import_test >> ydb_serializable::import_test [GOOD] |64.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/s3/ut/s3_aws_credentials_ut.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_flowcontrol_ut.cpp |65.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |66.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_split_ut.cpp |66.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_scan_ut.cpp >> ydb-tests-functional-sqs-with_quotas::import_test [GOOD] |67.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/scheme/ut/ydb-core-scheme-ut |67.6%| PREPARE $(BLACK_LINTER-sbr:6648883615) |67.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |67.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/import_test >> ydb-tests-functional-sqs-with_quotas::import_test [GOOD] |67.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |67.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_fat.cpp |68.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/dnsresolver/dnsresolver_ondemand_ut.cpp |68.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/dnsresolver/dnsresolver_ut.cpp |68.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/actors/dnsresolver/dnsresolver_caching_ut.cpp |68.6%| [ld] {default-linux-x86_64, relwithdebinfo} $(B)/tools/black_linter/black_linter |68.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/actors/dnsresolver/ut/ydb-library-actors-dnsresolver-ut |70.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut >> conftest.py::black [GOOD] >> test_clickhouse.py::black [GOOD] >> test_greenplum.py::black [GOOD] >> test_join.py::black [GOOD] >> 
test_postgresql.py::black [GOOD] >> test_ydb.py::black [GOOD] |73.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_provider_ut.cpp |73.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/read_attributes_utils_ut.cpp |73.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp |74.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |74.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut >> conftest.py::black [GOOD] >> test_join.py::black [GOOD] |74.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/generic/analytics/black >> test_ydb.py::black [GOOD] |75.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/generic/streaming/black >> test_join.py::black [GOOD] |75.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |75.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/partition_stats/partition_stats_ut.cpp |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/util/btree_benchmark/btree_benchmark |75.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |76.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut |76.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/conveyor_composite/ut/ut_simple.cpp |76.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |76.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_write_ut.cpp |76.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/config/validation/column_shard_config_validator_ut/column_shard_config_validator_ut.cpp |76.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_effects_ut.cpp |77.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/library/compatibility/downloader/libpy3library-compatibility-downloader.global.a |77.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_inplace_update_ut.cpp |77.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_immediate_effects_ut.cpp |77.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |77.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |77.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hullop/ut/ydb-core-blobstorage-vdisk-hullop-ut |78.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/mvp/meta/bin/mvp_meta |79.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_proto_ut.cpp |79.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_host_ut.cpp |79.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/kikimr_program_builder_ut.cpp |79.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_ut.cpp |79.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |79.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/library/compatibility/downloader/downloader |80.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/library/actors/core/actorsystem.cpp >> ydb_recipe::import_test [GOOD] |81.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/library/actors/core/actorsystem.cpp |81.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |81.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |81.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/formats/arrow/ut/ut_splitter.cpp |81.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/library/formats/arrow/ut/ut_arrow.cpp |81.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/driver_lib/run/auto_config_initializer_ut.cpp |81.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/formats/arrow/ut/ut_size_calcer.cpp |81.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/public/tools/ydb_recipe/import_test >> ydb_recipe::import_test [GOOD] |81.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ydb-tests-olap |82.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/worker_ut.cpp |82.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/replay/libpy3tools-ydb_serializable-replay.global.a |82.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part11/ydb-tests-fq-yt-kqp_yt_file-part11 >> ydb-tests-tools-nemesis-ut::import_test [GOOD] |84.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/nemesis/ut/import_test >> ydb-tests-tools-nemesis-ut::import_test [GOOD] |84.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/cache_block/cache_block_ut.cpp |84.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/fqrun/fqrun |83.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |84.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/actors/core/liblibrary-actors-core.a |83.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_errors.cpp |83.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |83.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |83.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/config/validation/column_shard_config_validator_ut/column_shard_config_validator_ut |83.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_selfheal/main.cpp |83.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_selfheal/self_heal_actor_ut.cpp |83.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |83.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut |83.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp >> ydb-tests-tools-kqprun-tests::import_test [GOOD] |83.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/storage_tenant_ut.cpp |83.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/kqprun/tests/import_test >> ydb-tests-tools-kqprun-tests::import_test [GOOD] |83.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |83.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part18/ydb-tests-fq-yt-kqp_yt_file-part18 >> downloader::import_test [GOOD] |83.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |83.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_view/ut_view.cpp |83.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/compatibility/downloader/import_test >> downloader::import_test [GOOD] |83.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/signer/ut/ydb-core-fq-libs-signer-ut |83.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |83.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_vdisk2/ydb-core-blobstorage-ut_vdisk2 |83.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/library/actors/core/liblibrary-actors-core.a |83.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_index_build_reboots.cpp |83.8%| [LD] 
{default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/actors/helpers/ut/ydb-library-actors-helpers-ut |83.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/common/http_gateway/ut/ydb-library-yql-providers-common-http_gateway-ut |83.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/yt/yt/client/libyt-yt-client.a |83.8%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a >> ydb-tests-functional-scheme_shard::import_test [GOOD] |83.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |83.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/long_tx_service/public/types_ut.cpp >> Signer::Basic [GOOD] |83.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/formats/arrow/ut/ydb-library-formats-arrow-ut |83.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/suite_tests/ydb-tests-functional-suite_tests |83.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/ut_backup_collection_reboots.cpp |83.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/main.cpp |83.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pg_ydb_connection.cpp |83.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pgwire.cpp |83.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pg_ydb_proxy.cpp |83.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_shard/import_test >> ydb-tests-functional-scheme_shard::import_test [GOOD] |83.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/actors/core/ut/ydb-library-actors-core-ut |83.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/signer/ut/unittest >> Signer::Basic [GOOD] |83.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/query/ut/ydb-core-blobstorage-vdisk-query-ut >> TSelfPingTest::Basic [GOOD] |83.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |83.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/ydb-core-blobstorage-vdisk-hulldb-barriers-ut |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/actors/helpers/ut/unittest >> TAwsSignature::SignPayload [GOOD] >> TDNSGatewaySuite::ShouldFilterDNSAddressedBasedOnProvidedProtocolIPV4Case [GOOD] >> TDNSGatewaySuite::ShouldUsePreviouslyKnownResolutionIfDNSIsNotResponding [GOOD] >> TAwsSignature::SignWithCanonization [GOOD] |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/actors/helpers/ut/unittest |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/actors/helpers/ut/unittest |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/actors/helpers/ut/unittest |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/actors/helpers/ut/unittest |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/actors/helpers/ut/unittest |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/actors/helpers/ut/unittest >> TSelfPingTest::Basic [GOOD] >> TAwsSignature::Sign [GOOD] >> TAwsSignature::SignCmp [GOOD] >> TDNSGatewaySuite::ShouldFilterDNSAddressedBasedOnProvidedProtocolIPV6Case [GOOD] >> TDNSGatewaySuite::ShouldFilterDNSAddressedBasedOnProvidedProtocolANYCase [GOOD] >> TAwsSignature::SignWithTime [GOOD] |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/actors/helpers/ut/unittest |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/actors/helpers/ut/unittest |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/actors/helpers/ut/unittest |83.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/kesus/proxy/proxy_actor_ut.cpp >> TAwsSignature::SignWithEscaping [GOOD] >> TDNSGatewaySuite::ShouldResolveHostnameFromDNSDuringInitialization [GOOD] |83.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/ut_helpers.cpp |83.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/common/http_gateway/ut/unittest >> TDNSGatewaySuite::ShouldFilterDNSAddressedBasedOnProvidedProtocolIPV4Case [GOOD] |83.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/common/http_gateway/ut/unittest >> TAwsSignature::SignPayload [GOOD] |83.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/common/http_gateway/ut/unittest >> TDNSGatewaySuite::ShouldUsePreviouslyKnownResolutionIfDNSIsNotResponding [GOOD] |83.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/ydb-core-blobstorage-vdisk-hulldb-compstrat-ut |83.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/common/http_gateway/ut/unittest >> TAwsSignature::SignWithEscaping [GOOD] |83.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/ydb-core-blobstorage-vdisk-hulldb-fresh-ut |83.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/query_actor/query_actor_ut.cpp |83.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/common/http_gateway/ut/unittest >> TAwsSignature::SignCmp [GOOD] |83.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/common/http_gateway/ut/unittest >> TDNSGatewaySuite::ShouldFilterDNSAddressedBasedOnProvidedProtocolANYCase [GOOD] |83.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/common/http_gateway/ut/unittest >> TDNSGatewaySuite::ShouldFilterDNSAddressedBasedOnProvidedProtocolIPV6Case [GOOD] |83.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/common/http_gateway/ut/unittest >> TAwsSignature::SignWithCanonization [GOOD] |83.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tablet_flat/ut_large/ydb-core-tablet_flat-ut_large >> ActorBenchmark::SendActivateReceive1Pool1ThreadAlloc >> ActorBasic::ActorSendReceive [GOOD] >> ActorBenchmark::SendActivateReceive2Pool1ThreadAlloc [GOOD] >> ActorBenchmark::SendActivateReceive2Pool1ThreadNoAlloc >> AskActor::Ok [GOOD] >> AskActor::Timeout [GOOD] >> BasicExecutorPool::Semaphore >> ActorBenchmark::WithOnlyOneSharedExecutors [GOOD] >> ActorBenchmark::WithOnlyOneSharedAndOneCommonExecutors >> ActorBenchmark::WithOnlyOneNotSharedExecutors [GOOD] >> ActorBenchmark::SendActivateReceive1Pool1ThreadAlloc [GOOD] |83.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/common/http_gateway/ut/unittest >> TDNSGatewaySuite::ShouldResolveHostnameFromDNSDuringInitialization [GOOD] |83.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/ut/queue_id_ut.cpp |83.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/common/http_gateway/ut/unittest >> TAwsSignature::SignWithTime [GOOD] |83.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/ut/params_ut.cpp |83.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_blob_ut.cpp >> ActorBenchmark::SendActivateReceive1Pool1ThreadNoAlloc [GOOD] >> ActorBenchmark::SendActivateReceive1Pool2ThreadsAlloc [GOOD] >> ActorBenchmark::SendActivateReceive1Pool2ThreadsNoAlloc >> TLoggerActorTest::NoCrashOnWriteFailure [GOOD] >> TLoggerActorTest::SubsequentWritesAreIgnored [GOOD] >> TLoggerActorTest::LoggerCanRecover [GOOD] >> 
TLoggerActorTest::ShouldObeyTimeThresholdMsWhenOverloaded [GOOD] >> TLoggerActorTest::ShouldUseLogBufferWhenOverloaded [GOOD] >> TLoggerActorTest::ShouldLoseLogsIfBufferZeroSize [GOOD] >> TProcStat::Fill [GOOD] >> ActorBenchmark::SendActivateReceive2Pool1ThreadNoAlloc [GOOD] >> ActorBenchmark::SendActivateReceive1Pool4Threads >> ActorBenchmark::WithOnlyOneSharedAndOneCommonExecutors [GOOD] >> ActorBenchmark::WithSharedExecutors >> ActorBenchmark::SendActivateReceive1Pool2ThreadsNoAlloc [GOOD] >> ActorBenchmark::SendActivateReceive1Pool1Threads [GOOD] >> ActorBenchmark::SendActivateReceive1Pool2Threads |83.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullds_heap_it_ut.cpp |83.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullds_generic_it_ut.cpp >> ActorCoro::Basic >> ChangingThreadsCountInBasicExecutorPool::DecreaseIncreaseThreadCount >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizeMinusOne >> TQueryResultSizeTrackerTest::CheckWithoutQueryResult [GOOD] >> BasicExecutorPool::Semaphore [GOOD] >> BasicExecutorPool::CheckCompleteOne [GOOD] >> BasicExecutorPool::CheckCompleteAll |83.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/hullbase_barrier_ut.cpp |83.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/replay/replay |83.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullstorageratio_ut.cpp >> BasicExecutorPool::CheckCompleteAll [GOOD] >> BasicExecutorPool::CheckCompleteOver [GOOD] >> BasicExecutorPool::CheckCompleteRoundRobinOver [GOOD] >> BasicExecutorPool::CheckStats [GOOD] >> ActorBenchmark::WithSharedExecutors [GOOD] >> ActorBenchmark::WithoutSharedExecutors >> ActorBenchmark::SendActivateReceive1Pool2Threads [GOOD] >> ActorBenchmark::SendActivateReceive1Pool3Threads ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/actors/core/ut/unittest >> TProcStat::Fill [GOOD] Test command err: 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 0 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 1 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 2 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 3 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 4 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 5 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 6 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 7 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 8 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 9 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 10 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 11 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 12 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 13 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 14 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 15 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 16 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 17 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 18 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 19 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 20 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 21 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 22 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 23 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 24 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 25 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 26 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 27 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 28 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 29 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 30 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 31 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 32 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 33 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 34 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 35 
1970-01-01T23:59:50.000000Z :FAKE DEBUG: 36 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 37 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 38 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 39 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 40 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 41 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 42 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 43 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 44 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 45 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 46 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 47 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 48 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 49 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 50 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 51 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 52 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 53 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 54 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 55 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 56 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 57 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 58 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 59 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 60 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 61 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 62 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 63 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 64 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 65 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 66 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 67 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 68 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 69 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 70 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 71 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 72 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 73 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 74 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 75 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 76 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 77 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 78 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 79 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 80 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 81 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 82 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 83 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 84 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 85 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 86 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 87 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 88 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 89 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 90 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 91 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 92 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 93 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 94 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 95 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 96 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 97 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 98 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 99 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 0 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 1 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 2 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 3 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 4 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 5 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 6 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 7 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 8 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 9 1970-01-01T23:59:50.000000Z :FAKE DEBUG: 10 1970-01-02T00:00:00.000000Z :FAKE ERROR: Logger overflow! Ignored 89 log records with priority [DEBUG] or lower! 
Fill = 0 AnonRss = 10657792 CGroupMemLim = 0 MemTotal = 0 MemAvailable = 0 |83.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/mediator/mediator_ut.cpp |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |83.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/hulldb/base/blobstorage_hullsatisfactionrank_ut.cpp |83.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::CheckWithoutQueryResult [GOOD] |83.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut |83.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut >> ActorBenchmark::WithoutSharedExecutors [GOOD] >> ActorBenchmark::SendReceive1Pool1ThreadAlloc |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/actors/core/ut/unittest >> BasicExecutorPool::CheckStats [GOOD] |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> ActorBenchmark::SendActivateReceive1Pool4Threads [GOOD] >> ActorBenchmark::SendActivateReceive1Pool5Threads |83.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ut_subdomain_reboots.cpp |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |83.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk >> ActorBenchmark::SendReceive1Pool1ThreadAlloc [GOOD] >> ActorBenchmark::SendReceive1Pool1ThreadNoAlloc >> ActorBenchmark::SendActivateReceive1Pool3Threads [GOOD] >> ActorBenchmark::SendReceive1Pool1ThreadNoAlloc [GOOD] >> ActorBenchmark::SendActivateReceiveWithMailboxNeighbours |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |83.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/balance_coverage/balance_coverage_builder_ut.cpp |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |83.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/actors/interconnect/ut/ydb-library-actors-interconnect-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/actors/core/ut/unittest >> ActorBenchmark::SendActivateReceive1Pool3Threads [GOOD] Test command err: Completed 174.7137571 Elapsed 714.586338us 519.579623 ± 21.40943077 ns 4.2% min 494.0993906 ns max 558.2424343 ns 413.4261014 ± 15.87617497 ns 3.9% min 390.322325 ns max 438.7594482 ns Lazy 390.766393 ± 34.37021954 ns 8.8% min 372.1191491 ns max 459.4729227 ns Tail 482.3732263 ± 66.3537767 ns 13.8% min 442.5650286 ns max 613.6686619 ns 418.207307 ± 44.02067346 ns 10.6% min 360.7194641 ns max 477.6470024 ns Lazy 373.5201806 ± 31.72667365 ns 8.5% min 349.46526 ns 
max 431.0499622 ns Tail 1648.945674 ± 721.4376831 ns 43.8% min 907.6505052 ns max 2605.772145 ns 926.4458387 ± 56.90417783 ns 6.2% min 850.5748248 ns max 1010.927919 ns Lazy 760.5901541 ± 229.0834431 ns 30.2% min 598.3640681 ns max 1209.080972 ns Tail 1062.35312 ± 66.76389214 ns 6.3% min 938.8054972 ns max 1130.201212 ns 1154.089983 ± 365.3416768 ns 31.7% min 696.2486544 ns max 1623.961481 ns Lazy 888.720326 ± 427.5596136 ns 48.2% min 446.9455096 ns max 1453.695618 ns Tail 484918.842 ± 24829.46141 ns 5.2% min 451593.3233 ns max 527916.4849 ns 768 ± 0 0% min 768 max 768 768 ± 0 0% min 768 max 768 768 ± 0 0% min 768 max 768 actorPairs: 1 397258.914 ± 28404.9625 ns 7.2% min 360884.9592 ns max 447839.6595 ns 768 ± 0 0% min 768 max 768 768 ± 0 0% min 768 max 768 768 ± 0 0% min 768 max 768 actorPairs: 1 Lazy 441937.8375 ± 80959.87199 ns 18.4% min 351475.2406 ns max 550269.077 ns 768 ± 0 0% min 768 max 768 768 ± 0 0% min 768 max 768 768 ± 0 0% min 768 max 768 actorPairs: 1 Tail 545662.6449 ± 104084.3067 ns 19.1% min 465845.2181 ns max 735081.6432 ns 512 ± 0 0% min 512 max 512 256 ± 0 0% min 256 max 256 256 ± 0 0% min 256 max 256 actorPairs: 2 599437.3624 ± 101117.8858 ns 16.9% min 508033.5932 ns max 776149.4318 ns 512 ± 0 0% min 512 max 512 256 ± 0 0% min 256 max 256 256 ± 0 0% min 256 max 256 actorPairs: 2 Lazy 436795.7691 ± 88231.98106 ns 20.2% min 361216.7249 ns max 604907.1873 ns 512 ± 0 0% min 512 max 512 256 ± 0 0% min 256 max 256 256 ± 0 0% min 256 max 256 actorPairs: 2 Tail 3693211.909 ± 1236662.903 ns 33.5% min 2188225.647 ns max 4969453.003 ns 1792 ± 0 0% min 1792 max 1792 1792 ± 0 0% min 1792 max 1792 1792 ± 0 0% min 1792 max 1792 actorPairs: 1 2371890.765 ± 780498.7733 ns 33% min 1503927.217 ns max 3380519.583 ns 1792 ± 0 0% min 1792 max 1792 1792 ± 0 0% min 1792 max 1792 1792 ± 0 0% min 1792 max 1792 actorPairs: 1 Lazy 1977490.85 ± 374918.1584 ns 19% min 1508714.266 ns max 2554912.999 ns 1792 ± 0 0% min 1792 max 1792 1792 ± 0 0% min 1792 max 1792 1792 ± 0 0% min 1792 max 1792 actorPairs: 1 Tail 1095778.969 ± 147280.2179 ns 13.5% min 813151.8371 ns max 1201048.141 ns 1484.8 ± 102.4 6.9% min 1280 max 1536 716.8 ± 102.4 14.3% min 512 max 768 768 ± 0 0% min 768 max 768 actorPairs: 2 1453136.988 ± 327170.6178 ns 22.6% min 955205.7119 ns max 1828171.606 ns 1484.8 ± 102.4 6.9% min 1280 max 1536 716.8 ± 102.4 14.3% min 512 max 768 768 ± 0 0% min 768 max 768 actorPairs: 2 Lazy 1042921.569 ± 289651.108 ns 27.8% min 740972.2393 ns max 1535917.053 ns 1484.8 ± 102.4 6.9% min 1280 max 1536 716.8 ± 102.4 14.3% min 512 max 768 768 ± 0 0% min 768 max 768 actorPairs: 2 Tail 1514329.426 ± 618226.5265 ns 40.9% min 931532.0708 ns max 2422063.369 ns 1536 ± 0 0% min 1536 max 1536 512 ± 0 0% min 512 max 512 512 ± 0 0% min 512 max 512 actorPairs: 3 1200259.12 ± 492610.8586 ns 41.1% min 725653.883 ns max 2152421.007 ns 1382.4 ± 204.8 14.9% min 1024 max 1536 409.6 ± 125.4138748 30.7% min 256 max 512 512 ± 0 0% min 512 max 512 actorPairs: 3 Lazy 969333.3199 ± 329041.6269 ns 34% min 637140.5853 ns max 1369272.42 ns 1433.6 ± 204.8 14.3% min 1024 max 1536 409.6 ± 204.8 50% min 0 max 512 512 ± 0 0% min 512 max 512 actorPairs: 3 Tail 1406537.231 ± 573225.349 ns 40.8% min 919153.4998 ns max 2137119.69 ns 1024 ± 0 0% min 1024 max 1024 256 ± 0 0% min 256 max 256 256 ± 0 0% min 256 max 256 actorPairs: 4 1483012.945 ± 596703.2617 ns 40.3% min 983861.8532 ns max 2346348.611 ns 1024 ± 0 0% min 1024 max 1024 256 ± 0 0% min 256 max 256 256 ± 0 0% min 256 max 256 actorPairs: 4 Lazy 856528.3546 ± 277441.4607 ns 32.4% min 
605531.6286 ns max 1243157.334 ns 1024 ± 0 0% min 1024 max 1024 256 ± 0 0% min 256 max 256 256 ± 0 0% min 256 max 256 actorPairs: 4 Tail 5452878.398 ± 1956033.217 ns 35.9% min 3581667.854 ns max 7844570.066 ns 2816 ± 0 0% min 2816 max 2816 2816 ± 0 0% min 2816 max 2816 2816 ± 0 0% min 2816 max 2816 actorPairs: 1 3752165.58 ± 939294.6197 ns 25.1% min 2920443.681 ns max 5127183.06 ns 2816 ± 0 0% min 2816 max 2816 2816 ± 0 0% min 2816 max 2816 2816 ± 0 0% min 2816 max 2816 actorPairs: 1 Lazy 3603434.493 ± 904720.1719 ns 25.2% min 2358890.559 ns max 4647952.948 ns 2816 ± 0 0% min 2816 max 2816 2816 ± 0 0% min 2816 max 2816 2816 ± 0 0% min 2816 max 2816 actorPairs: 1 Tail 2999394.307 ± 992343.0291 ns 33.1% min 1572050.452 ns max 3941846.182 ns 2560 ± 0 0% min 2560 max 2560 1280 ± 0 0% min 1280 max 1280 1280 ± 0 0% min 1280 max 1280 actorPairs: 2 2488479.063 ± 617163.0065 ns 24.9% min 1717937.174 ns max 3141496.894 ns 2560 ± 0 0% min 2560 max 2560 1280 ± 0 0% min 1280 max 1280 1280 ± 0 0% min 1280 max 1280 actorPairs: 2 Lazy 1353715.112 ± 59211.25306 ns 4.4% min 1241900.433 ns max 1401198.108 ns 2560 ± 0 0% min 2560 max 2560 1280 ± 0 0% min 1280 max 1280 1280 ± 0 0% min 1280 max 1280 actorPairs: 2 Tail 2303540.005 ± 493812.7907 ns 21.5% min 1324288.593 ns max 2632940.094 ns 2304 ± 0 0% min 2304 max 2304 768 ± 0 0% min 768 max 768 768 ± 0 0% min 768 max 768 actorPairs: 3 1895453.499 ± 579135.7402 ns 30.6% min 1139874.947 ns max 2387561.735 ns 2304 ± 0 0% min 2304 max 2304 768 ± 0 0% min 768 max 768 768 ± 0 0% min 768 max 768 actorPairs: 3 Lazy 1288726.111 ± 252151.7823 ns 19.6% min 791836.6395 ns max 1486204.311 ns 2304 ± 0 0% min 2304 max 2304 768 ± 0 0% min 768 max 768 768 ± 0 0% min 768 max 768 actorPairs: 3 Tail 2328962.883 ± 471014.7349 ns 20.3% min 1390004.271 ns max 2620490.358 ns 2048 ± 0 0% min 2048 max 2048 512 ± 0 0% min 512 max 512 512 ± 0 0% min 512 max 512 actorPairs: 4 2102003.638 ± 602249.5248 ns 28.7% min 1356114.049 ns max 2684783.754 ns 2048 ± 0 0% min 2048 max 2048 512 ± 0 0% min 512 max 512 512 ± 0 0% min 512 max 512 actorPairs: 4 Lazy 1199166.398 ± 307567.5514 ns 25.7% min 822347.0607 ns max 1528295.461 ns 2048 ± 0 0% min 2048 max 2048 512 ± 0 0% min 512 max 512 512 ± 0 0% min 512 max 512 actorPairs: 4 Tail 1730886.662 ± 570956.6856 ns 33% min 1233647.384 ns max 2430912.794 ns 2560 ± 0 0% min 2560 max 2560 512 ± 0 0% min 512 max 512 512 ± 0 0% min 512 max 512 actorPairs: 5 1812221.191 ± 590620.1273 ns 32.6% min 1310372.473 ns max 2588772.149 ns 2560 ± 0 0% min 2560 max 2560 512 ± 0 0% min 512 max 512 512 ± 0 0% min 512 max 512 actorPairs: 5 Lazy 1440625.711 ± 42221.61678 ns 3% min 1391678.135 ns max 1513824.06 ns 2560 ± 0 0% min 2560 max 2560 512 ± 0 0% min 512 max 512 512 ± 0 0% min 512 max 512 actorPairs: 5 Tail 1823191.511 ± 500584.3223 ns 27.5% min 1190964.868 ns max 2301465.015 ns 1536 ± 0 0% min 1536 max 1536 256 ± 0 0% min 256 max 256 256 ± 0 0% min 256 max 256 actorPairs: 6 2022940.553 ± 557283.0024 ns 27.6% min 1342052.594 ns max 2610830.061 ns 1536 ± 0 0% min 1536 max 1536 256 ± 0 0% min 256 max 256 256 ± 0 0% min 256 max 256 actorPairs: 6 Lazy 1011071.656 ± 252940.3766 ns 25.1% min 734337.9266 ns max 1369092.004 ns 1536 ± 0 0% min 1536 max 1536 256 ± 0 0% min 256 max 256 256 ± 0 0% min 256 max 256 actorPairs: 6 Tail >> TBlobStorageHullFresh::AppendixPerf >> TFreshAppendixTest::IterateForwardIncluding [GOOD] >> TFreshAppendixTest::IterateForwardExcluding [GOOD] |83.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_login/ut_login.cpp >> 
ActorBenchmark::SendActivateReceiveWithMailboxNeighbours [GOOD] >> TFreshAppendixTest::IterateForwardAll [GOOD] >> TFreshAppendixTest::IterateBackwardIncluding [GOOD] >> TSTreeTest::Basic [GOOD] >> TSVecTest::Basic [GOOD] >> TBlobStorageHullFresh::SimpleBackWardEnd2Times [GOOD] >> TBlobStorageHullFresh::Perf >> TBlobStorageHullFresh::SimpleBackwardEnd [GOOD] >> TBlobStorageHullFresh::SimpleBackWardMiddle2Times [GOOD] |83.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/ut/ydb-core-fq-libs-db_id_async_resolver_impl-ut |83.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk |84.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |84.0%| [TA] $(B)/ydb/library/actors/helpers/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> ActorBenchmark::SendActivateReceive1Pool5Threads [GOOD] >> ActorBenchmark::SendActivateReceive1Pool6Threads |84.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TFreshAppendixTest::IterateForwardExcluding [GOOD] |84.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFresh::SimpleBackWardMiddle2Times [GOOD] |84.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_read_ut.cpp |84.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TSVecTest::Basic [GOOD] |84.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TFreshAppendixTest::IterateBackwardIncluding [GOOD] |84.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/query/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/actors/core/ut/unittest >> ActorBenchmark::SendActivateReceiveWithMailboxNeighbours [GOOD] Test command err: Completed 1828.087589 Elapsed 9298.046928us Completed 160.2464581 Elapsed 664.9133053us Completed 6461.419769 Completed 77507.4301 Elapsed 620304.3248us Completed 59615.82846 Elapsed 477115.3281us 184.4214582 ± 48.13832075 ns 26.2% min 150.5724845 ns max 274.4466389 ns Simple 163.3989552 ± 18.56234788 ns 11.4% min 144.3557108 ns max 198.7602526 ns Simple Lazy 157.0670524 ± 4.66087095 ns 3% min 152.8259271 ns max 165.8652062 ns Simple Tail 180.9986725 ± 27.85539619 ns 15.4% min 151.8772147 ns max 232.6080229 ns Revolving 172.2003182 ± 18.26446739 ns 10.7% min 156.5846733 ns max 206.9857693 ns Revolving Lazy 161.8543392 ± 4.551720545 ns 2.9% min 154.1446974 ns max 166.5331157 ns Revolving Tail 203.2591148 ± 43.46708645 ns 21.4% min 163.6950016 ns max 286.7568336 ns HTSwap 177.2294965 ± 11.02327457 ns 6.3% min 169.1375618 ns max 198.8454963 ns HTSwap Lazy 208.502305 ± 70.32110745 ns 33.8% min 165.3346894 ns max 347.7341669 ns HTSwap Tail 190.0868555 ± 29.96019344 ns 15.8% min 153.5199112 ns max 226.626923 ns ReadAsFilled 168.7277903 ± 2.736971171 ns 1.7% min 165.6365525 ns max 173.5551917 ns ReadAsFilled Lazy 202.9159337 ± 54.18705221 ns 26.8% min 164.7700752 ns max 307.9775031 ns ReadAsFilled Tail 248.1129513 ± 70.50454441 ns 28.5% min 161.5288085 ns max 307.7007118 ns TinyReadAsFilled 225.964429 ± 54.51888543 ns 24.2% min 167.8639205 ns max 311.3541569 ns TinyReadAsFilled Lazy 194.9347155 ± 48.46364448 ns 24.9% min 166.3365538 ns max 291.6217423 ns TinyReadAsFilled Tail 148.4287556 ± 28.059538 ns 19% min 132.1257456 ns max 204.1998042 ns Simple 198.5791348 ± 
31.19597804 ns 15.8% min 153.6893957 ns max 239.4285226 ns Simple Lazy 162.0009583 ± 29.99406035 ns 18.6% min 137.0568435 ns max 214.6958122 ns Simple Tail 175.9775171 ± 45.34094601 ns 25.8% min 131.9843413 ns max 253.9169448 ns Revolving 155.1138683 ± 11.99271166 ns 7.8% min 139.4847849 ns max 168.4957268 ns Revolving Lazy 193.5292974 ± 48.12768384 ns 24.9% min 136.154263 ns max 243.9875568 ns Revolving Tail 133.0802745 ± 1.604939886 ns 1.3% min 131.1910733 ns max 135.5705943 ns HTSwap 154.2084798 ± 24.98461418 ns 16.3% min 140.3612909 ns max 204.0142738 ns HTSwap Lazy 172.6590296 ± 45.94497486 ns 26.7% min 137.3607122 ns max 256.8473227 ns HTSwap Tail 138.175241 ± 7.542038343 ns 5.5% min 131.5591256 ns max 147.6250579 ns ReadAsFilled 154.9046702 ± 20.45882308 ns 13.3% min 138.381631 ns max 194.9914778 ns ReadAsFilled Lazy 139.7487396 ± 4.065261748 ns 3% min 135.0741751 ns max 146.6813598 ns ReadAsFilled Tail 137.5917728 ± 6.609902533 ns 4.9% min 130.4148541 ns max 148.8104469 ns TinyReadAsFilled 181.4054355 ± 40.75740236 ns 22.5% min 140.5688844 ns max 250.7188014 ns TinyReadAsFilled Lazy 156.4085698 ± 33.53950375 ns 21.5% min 134.4905064 ns max 222.8350822 ns TinyReadAsFilled Tail 606.5681376 ± 146.3721456 ns 24.2% min 485.7219597 ns max 808.5178454 ns neighbourActors: 0 467.9956382 ± 73.93140887 ns 15.8% min 413.5044674 ns max 614.017752 ns neighbourActors: 0 Lazy 386.5351096 ± 10.96562669 ns 2.9% min 376.012577 ns max 405.9922428 ns neighbourActors: 0 Tail 503.7029363 ± 9.445756709 ns 1.9% min 491.1411563 ns max 518.1849641 ns neighbourActors: 1 401.0988117 ± 15.8272353 ns 4% min 376.4891773 ns max 424.381992 ns neighbourActors: 1 Lazy 387.0837518 ± 24.10215705 ns 6.3% min 361.7165738 ns max 426.4087977 ns neighbourActors: 1 Tail 499.1560688 ± 21.2644212 ns 4.3% min 462.8792584 ns max 517.1073457 ns neighbourActors: 2 446.6788606 ± 54.682124 ns 12.3% min 399.9429304 ns max 525.7493639 ns neighbourActors: 2 Lazy 451.7330315 ± 65.17182192 ns 14.5% min 395.013378 ns max 573.5910068 ns neighbourActors: 2 Tail 513.5562217 ± 30.36987917 ns 6% min 482.392781 ns max 571.1528199 ns neighbourActors: 3 431.5101269 ± 33.18197658 ns 7.7% min 393.5324051 ns max 477.4853026 ns neighbourActors: 3 Lazy 403.3295019 ± 20.74726594 ns 5.2% min 378.9584687 ns max 431.1918582 ns neighbourActors: 3 Tail 504.3440892 ± 22.27333111 ns 4.5% min 483.4533421 ns max 536.3609957 ns neighbourActors: 4 434.9705463 ± 44.20181893 ns 10.2% min 377.1092594 ns max 500.1062594 ns neighbourActors: 4 Lazy 524.3556842 ± 106.0274187 ns 20.3% min 383.5629296 ns max 641.6144162 ns neighbourActors: 4 Tail 533.2750337 ± 73.70148984 ns 13.9% min 487.7758563 ns max 679.5387587 ns neighbourActors: 5 489.4835905 ± 125.0974996 ns 25.6% min 390.2584117 ns max 695.0247577 ns neighbourActors: 5 Lazy 485.8102562 ± 64.26673028 ns 13.3% min 402.4051982 ns max 597.4691851 ns neighbourActors: 5 Tail 502.6770917 ± 16.27978548 ns 3.3% min 486.9962385 ns max 530.4511516 ns neighbourActors: 6 396.0354097 ± 12.49012918 ns 3.2% min 375.928294 ns max 413.1874028 ns neighbourActors: 6 Lazy 486.2469224 ± 98.18258414 ns 20.2% min 395.584295 ns max 606.2837828 ns neighbourActors: 6 Tail 572.1501688 ± 97.86051865 ns 17.2% min 491.4200929 ns max 757.1393263 ns neighbourActors: 7 491.1770769 ± 73.55642403 ns 15% min 418.0437093 ns max 625.2896006 ns neighbourActors: 7 Lazy 482.1276908 ± 103.2988895 ns 21.5% min 391.14539 ns max 638.4437698 ns neighbourActors: 7 Tail 565.6581699 ± 112.9630553 ns 20% min 500.676173 ns max 791.233809 ns neighbourActors: 8 
414.4008774 ± 6.120170314 ns 1.5% min 406.6374092 ns max 421.641791 ns neighbourActors: 8 Lazy 452.7787428 ± 73.17525461 ns 16.2% min 393.7722104 ns max 590.8931039 ns neighbourActors: 8 Tail 532.0838338 ± 40.36483024 ns 7.6% min 477.3909859 ns max 583.015653 ns neighbourActors: 16 397.1824613 ± 15.45274036 ns 3.9% min 383.1635887 ns max 426.4087977 ns neighbourActors: 16 Lazy 381.8176693 ± 8.689570499 ns 2.3% min 369.8247996 ns max 396.8074019 ns neighbourActors: 16 Tail 532.3649779 ± 24.51108208 ns 4.7% min 514.7393946 ns max 578.3138653 ns neighbourActors: 32 513.0457075 ± 110.2056935 ns 21.5% min 414.6051634 ns max 669.9555803 ns neighbourActors: 32 Lazy 479.5345837 ± 110.6344992 ns 23.1% min 379.3558029 ns max 619.6486595 ns neighbourActors: 32 Tail 586.4182783 ± 95.1927231 ns 16.3% min 495.647287 ns max 757.233643 ns neighbourActors: 64 437.7573037 ± 13.54120898 ns 3.1% min 415.427926 ns max 454.568352 ns neighbourActors: 64 Lazy 442.6417046 ± 60.99878257 ns 13.8% min 394.8458153 ns max 560.0194362 ns neighbourActors: 64 Tail 753.1914702 ± 135.7809217 ns 18.1% min 517.4826058 ns max 864.7466508 ns neighbourActors: 128 631.0708128 ± 94.30245094 ns 15% min 457.178115 ns max 720.3307302 ns neighbourActors: 128 Lazy 442.9395046 ± 24.31598149 ns 5.5% min 417.8018974 ns max 473.4296846 ns neighbourActors: 128 Tail 571.1381707 ± 35.23551113 ns 6.2% min 515.6042988 ns max 623.1875423 ns neighbourActors: 256 495.6824049 ± 62.73826793 ns 12.7% min 418.5975691 ns max 568.8240002 ns neighbourActors: 256 Lazy 430.2533068 ± 5.586234279 ns 1.3% min 420.8441125 ns max 436.8488533 ns neighbourActors: 256 Tail |84.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write.cpp |84.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write_ut.cpp |84.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut >> TBlobStorageHullFresh::SimpleForward [GOOD] >> TIncrHugeBasicTest::WriteReadDeleteEnumRecover [GOOD] >> TBlobStorageHullFresh::SimpleBackwardMiddle [GOOD] >> TBlobStorageHullFreshSegment::PerfAppendix >> TPDiskTest::TestThatEveryValueOfEStateEnumKeepsItIntegerValue [GOOD] >> TPDiskTest::TestPDiskOwnerRecreation >> TBsVDiskExtremeHandoffHuge::SimpleHndPut1SeqGetFresh |84.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/long_tx_service/public/ut/ydb-core-tx-long_tx_service-public-ut |84.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blob_depot/ut/ydb-core-blob_depot-ut >> TBlobStorageHullFresh::Perf [GOOD] >> PDiskCompatibilityInfo::OldCompatible >> PDiskCompatibilityInfo::OldCompatible [GOOD] >> PDiskCompatibilityInfo::Incompatible |84.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/incrhuge/ut/unittest |84.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/incrhuge/ut/unittest >> TIncrHugeBasicTest::WriteReadDeleteEnumRecover [GOOD] |84.1%| [TA] $(B)/ydb/library/yql/providers/common/http_gateway/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |84.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFresh::SimpleBackwardMiddle [GOOD] >> PDiskCompatibilityInfo::Incompatible [GOOD] >> PDiskCompatibilityInfo::NewIncompatibleWithDefault [GOOD] >> PDiskCompatibilityInfo::Trunk >> PDiskCompatibilityInfo::Trunk [GOOD] >> PDiskCompatibilityInfo::SuppressCompatibilityCheck [GOOD] >> PDiskCompatibilityInfo::Migration >> TBsVDiskRange::Simple3PutRangeGetNothingForwardFresh >> PDiskCompatibilityInfo::Migration [GOOD] >> ReadOnlyPDisk::SimpleRestartReadOnly |84.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/config/validation/ut/ydb-core-config-validation-ut |84.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFresh::Perf [GOOD] >> TBsLocalRecovery::WriteRestartReadHuge >> ReadOnlyPDisk::SimpleRestartReadOnly [GOOD] >> ReadOnlyPDisk::StartReadOnlyUnformattedShouldFail [GOOD] >> ReadOnlyPDisk::StartReadOnlyZeroedShouldFail [GOOD] >> ReadOnlyPDisk::VDiskStartsOnReadOnlyPDisk >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingForwardFresh >> TIncrHugeBasicTest::Defrag >> TBsVDiskRepl1::ReplProxyKeepBits >> ReadOnlyPDisk::VDiskStartsOnReadOnlyPDisk [GOOD] >> ReadOnlyPDisk::ReadOnlyPDiskEvents |84.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/formats/arrow/ut/ydb-library-formats-arrow-ut >> TPDiskTest::TestPDiskOwnerRecreation [GOOD] >> TPDiskTest::TestPDiskOwnerRecreationWithStableOwner >> TBsVDiskExtremeHuge::Simple3Put3GetFresh >> ReadOnlyPDisk::ReadOnlyPDiskEvents [GOOD] >> ShredPDisk::EmptyShred >> ActorBenchmark::SendActivateReceive1Pool6Threads [GOOD] >> ActorBenchmark::SendActivateReceive1Pool7Threads >> TBsVDiskExtremeHandoffHuge::SimpleHndPut1SeqGetFresh [GOOD] >> TBsVDiskExtremeHandoffHuge::SimpleHnd2Put1GetFresh |84.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base_reboots/ut_base_reboots.cpp |83.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_keys.cpp |83.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/base/ut/ydb-core-blobstorage-base-ut >> ShredPDisk::EmptyShred [GOOD] >> ShredPDisk::SimpleShred |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/incrhuge/ut/unittest >> TBsVDiskExtreme::SimpleGetFromEmptyDB >> TBsLocalRecovery::StartStopNotEmptyDB |83.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_pdisk.cpp |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/repl/ut/unittest |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/repl/ut/unittest >> ActorCoro::Basic [GOOD] >> ActorCoro::PoisonPill |83.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_ut.cpp |83.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init |83.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/pqtablet_mock.cpp >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingForwardFresh [GOOD] >> TPDiskTest::TestPDiskOwnerRecreationWithStableOwner [GOOD] >> TPDiskTest::TestVDiskMock >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingForwardCompaction >> TBsVDiskRange::Simple3PutRangeGetNothingForwardFresh [GOOD] >> TBsVDiskRange::Simple3PutRangeGetNothingForwardCompaction >> ydb-tests-olap::import_test [GOOD] >> ShredPDisk::SimpleShred [GOOD] >> ShredPDisk::SimpleShredDirtyChunks |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/blobstorage/vdisk/repl/ut/unittest |83.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut >> TBsVDiskExtremeHuge::Simple3Put3GetFresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put3GetCompaction |83.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/kqp_mock.cpp |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/repl/ut/unittest |83.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/import_test >> ydb-tests-olap::import_test [GOOD] >> TPDiskTest::TestVDiskMock [GOOD] >> TPDiskTest::TestRealFile >> TBsVDiskExtreme::SimpleGetFromEmptyDB [GOOD] >> TBsVDiskExtremeHandoff::SimpleHnd6Put1SeqGetFresh >> TBsVDiskExtremeHandoffHuge::SimpleHnd2Put1GetFresh [GOOD] >> TBsVDiskRepl1::ReplProxyKeepBits [GOOD] >> TBsVDiskRepl2::ReplEraseDiskRestoreWOOneDisk >> TBsVDiskExtremeHuge::Simple3Put1SeqGetAllFresh >> ShredPDisk::SimpleShredDirtyChunks [GOOD] >> ShredPDisk::KillVDiskWhilePreShredding |83.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/topic_yql_ut.cpp |83.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/s3/common/ut/ydb-library-yql-providers-s3-common-ut >> TBlobStorageHullFreshSegment::PerfAppendix [GOOD] >> TBlobStorageHullFreshSegment::PerfSkipList >> ShredPDisk::KillVDiskWhilePreShredding [GOOD] >> ShredPDisk::KillVDiskWhileShredding |83.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |83.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud >> ActorBenchmark::SendActivateReceive1Pool7Threads [GOOD] >> ActorBenchmark::SendActivateReceive1Pool8Threads >> ShredPDisk::KillVDiskWhileShredding [GOOD] >> ShredPDisk::InitVDiskAfterShredding |83.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/functions_executor_wrapper.cpp |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/repl/ut/unittest >> TBsVDiskRange::Simple3PutRangeGetNothingForwardCompaction [GOOD] >> TBsVDiskRange::Simple3PutRangeGetNothingBackwardFresh >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingForwardCompaction [GOOD] >> TBsVDiskRepl1::ReplProxyData |83.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_ut.cpp >> ShredPDisk::InitVDiskAfterShredding [GOOD] >> ShredPDisk::ReinitVDiskWhilePreShredding >> TBsVDiskGC::GCPutKeepIntoEmptyDB >> HullReplWriteSst::Basic >> TBsVDiskExtremeHuge::Simple3Put1SeqGetAllFresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqGetAllCompaction |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/repl/ut/unittest |83.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/minidumps/ydb-tests-functional-minidumps |83.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/object_storage_listing_ut.cpp >> TBsVDiskExtremeHandoff::SimpleHnd6Put1SeqGetFresh [GOOD] >> TBsVDiskExtremeHandoff::SimpleHnd6Put1SeqGetCompaction >> TBlobStorageReplRecoveryMachine::BasicFunctionality >> ShredPDisk::ReinitVDiskWhilePreShredding [GOOD] >> ShredPDisk::ReinitVDiskWhileShredding >> TBsVDiskExtremeHuge::Simple3Put3GetCompaction [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsOkFresh |83.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots ------- [LD] {default-linux-x86_64, relwithdebinfo} $(B)/yql/tools/yqlrun/yqlrun ld.lld: warning: version script assignment of 'global' to symbol '__after_morecore_hook' 
failed: symbol not defined [... several hundred further "ld.lld: warning: version script assignment of 'global' to symbol '<name>' failed: symbol not defined" lines for libc, libm, pthread, and sanitizer-interceptor symbols ('daylight' through 'wordexp') omitted ...]
ld.lld: warning: version script assignment of 'global' to symbol '__woverflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'write' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'writev' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wuflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wunderflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_bool' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_bytes' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_char' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_double' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_enum' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_float' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_hyper' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int16_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int32_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int64_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int8_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_long' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_longlong_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdrmem_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_quad_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_short' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdrstdio_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_string' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_char' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_hyper' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_int' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint16_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint32_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint64_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint8_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_long' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_longlong_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_quad_t' 
failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_short' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__xpg_strerror_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__xstat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__xstat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'accept' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'accept4' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'asctime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'asctime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'asprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'backtrace' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'backtrace_symbols' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'bcopy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'canonicalize_file_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'capget' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'capset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'cfree' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'clock_getres' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'clock_gettime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'clock_settime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'confstr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ctermid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ctime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ctime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__cxa_atexit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dladdr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dlclose' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dlerror' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dl_iterate_phdr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dlopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'drand48_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'endgrent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'endpwent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_pwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_wait' failed: 
symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_aton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_aton_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_hostton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_line' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_ntoa' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_ntoa_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_ntohost' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'eventfd_read' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'eventfd_write' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_exit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fclose' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fcvt' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fdopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fflush' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgetgrent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgetgrent_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgetpwent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgetpwent_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgets' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgets_unlocked' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgetxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'flistxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fmemopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fopen64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fopencookie' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fork' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'forkpty' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fread' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fread_unlocked' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'freopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'freopen64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'frexp' failed: symbol not defined 
ld.lld: warning: version script assignment of 'global' to symbol 'frexpf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'frexpl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fstatfs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fstatfs64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fstatvfs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fstatvfs64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ftime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstatat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstatat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gcvt' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getaddrinfo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'get_current_dir_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getcwd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getdelim' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__getdelim' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getenv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getgrent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getgrent_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getgrgid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getgrgid_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getgrnam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getgrnam_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getgroups' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyaddr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyaddr_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname2' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname2_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostent_r' failed: symbol not 
defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getifaddrs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getitimer' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getline' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getmntent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getmntent_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getnameinfo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpass' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpeername' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpwent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpwent_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpwnam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpwnam_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpwuid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpwuid_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getresgid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getresuid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getrlimit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getrlimit64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getrusage' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getsockname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getsockopt' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gettimeofday' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'glob' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'glob64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gmtime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gmtime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'iconv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'if_indextoname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'if_nametoindex' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inet_aton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inet_ntop' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inet_pton' failed: symbol not defined 
ld.lld: warning: version script assignment of 'global' to symbol 'initgroups' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ioctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_fprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_fscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_printf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_scanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_snprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_sprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_sscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vfprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vfscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vsnprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vsprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vsscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgamma' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgammaf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgammaf_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgammal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgammal_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgamma_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgetxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'listxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'llistxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'localtime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'localtime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lrand48_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__lxstat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__lxstat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mallinfo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'malloc_stats' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mallopt' failed: symbol not defined ld.lld: warning: version script 
assignment of 'global' to symbol 'mbrtowc' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbsnrtowcs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbsrtowcs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbstowcs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbtowc' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memccpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memchr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memcmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memmem' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memmove' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mempcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memrchr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mincore' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mktime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mlockall' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mmap' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mmap64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'modf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'modff' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'modfl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'munlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'munlockall' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_obstack_begin' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_obstack_begin_1' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_obstack_newchunk' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'opendir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'open_memstream' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'openpty' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'open_wmemstream' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__overflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pipe' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pipe2' failed: symbol 
not defined ld.lld: warning: version script assignment of 'global' to symbol 'poll' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ppoll' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'prctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pread' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pread64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'preadv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'preadv64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'printf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'prlimit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'prlimit64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'process_vm_readv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'process_vm_writev' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getaffinity_np' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getdetachstate' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getguardsize' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getinheritsched' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getschedparam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getschedpolicy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getscope' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getstack' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_attr_getstacksize' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_barrierattr_getpshared' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_condattr_getclock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_condattr_getpshared' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_getschedparam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_join' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_key_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getprioceiling' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getprotocol' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getpshared' failed: symbol not defined ld.lld: warning: version script 
assignment of 'global' to symbol 'pthread_mutexattr_getrobust' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_getrobust_np' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutexattr_gettype' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_lock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_mutex_unlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlockattr_getkind_np' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_rwlockattr_getpshared' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_setcancelstate' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_setcanceltype' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pthread_setname_np' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ptrace' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'putenv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pvalloc' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pwrite' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pwrite64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pwritev' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pwritev64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'random_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'rand_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'read' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir64_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readdir_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readlink' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'readv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'realpath' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'recv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'recvfrom' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'recvmsg' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'remquo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'remquof' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'remquol' failed: symbol not defined ld.lld: warning: version script assignment of 
'global' to symbol 'scandir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'scandir64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'scanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sched_getaffinity' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sched_getparam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_destroy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_getvalue' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_post' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_timedwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_trywait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sem_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'send' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sendmsg' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sendto' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setenv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setgrent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setitimer' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setlocale' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'setpwent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'shmat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'shmctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigaction' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigemptyset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigfillset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'signal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigpending' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigprocmask' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigtimedwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sigwaitinfo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sincos' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sincosf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sincosl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 
'snprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'socketpair' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'statfs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'statfs64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'statvfs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'statvfs64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'stpcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcasecmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcasestr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strchr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strchrnul' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strcspn' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strdup' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strdup' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strerror' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strerror_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strftime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strftime_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strftime_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strlen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strncasecmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strncat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strncmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strncpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strndup' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strndup' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strnlen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strpbrk' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strptime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strrchr' failed: symbol not defined ld.lld: warning: 
version script assignment of 'global' to symbol 'strspn' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strstr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtod' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtod_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtod_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtod_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtof' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtof_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtof_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtof_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoimax' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtol' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtold' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtold_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtold_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtold_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtol_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoll' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtol_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtol_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoll_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoll_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoll_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoul' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoul_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoull' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoul_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoul_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoull_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__strtoull_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoull_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strtoumax' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strxfrm' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'strxfrm_l' failed: symbol 
not defined ld.lld: warning: version script assignment of 'global' to symbol 'swprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'sysinfo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tcgetattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tempnam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'textdomain' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'time' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'timerfd_gettime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'timerfd_settime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'times' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__tls_get_addr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tmpnam' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tmpnam_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tsearch' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tzset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__uflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'uname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__underflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vasprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vfprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vfscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vsnprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vsprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vsscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'vswprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wait3' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wait4' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'waitid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'waitpid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcrtomb' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcschr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcscmp' failed: symbol not defined ld.lld: warning: version script assignment of 
'global' to symbol 'wcscpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsftime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcsftime_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsftime_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcslen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsnrtombs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcsrtombs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstod' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstod_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstod_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstod_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstof' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstof_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstof_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstof_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstol' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstold' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstold_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstold_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstold_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstol_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoll' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstol_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstol_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoll_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoll_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoll_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstombs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoul' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoul_internal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoull' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoul_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoul_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wcstoull_internal' failed: symbol not defined ld.lld: warning: 
version script assignment of 'global' to symbol '__wcstoull_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wcstoull_l' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wmemcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wmemmove' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wmempcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wmemset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'wordexp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__woverflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'write' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'writev' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wuflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__wunderflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_bool' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_bytes' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_char' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_double' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_enum' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_float' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_hyper' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int16_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int32_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int64_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_int8_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_long' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_longlong_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdrmem_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_quad_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_short' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdrstdio_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_string' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_char' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_hyper' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_int' failed: symbol not defined ld.lld: warning: 
version script assignment of 'global' to symbol 'xdr_uint16_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint32_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint64_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint8_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_long' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_longlong_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_quad_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_short' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__xpg_strerror_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__xstat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__xstat64' failed: symbol not defined >> TBsVDiskRangeHuge::Simple3PutRangeGetAllForwardFresh >> TBlobStorageHullFreshSegment::PerfSkipList [GOOD] >> TBlobStorageReplRecoveryMachine::BasicFunctionality [GOOD] |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/repl/ut/unittest |83.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_ut.cpp |83.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/locks_ut.cpp |83.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/config/validation/column_shard_config_validator_ut/column_shard_config_validator_ut >> TBsVDiskRange::Simple3PutRangeGetNothingBackwardFresh [GOOD] >> TBsVDiskRange::Simple3PutRangeGetNothingBackwardCompaction >> ShredPDisk::ReinitVDiskWhileShredding [GOOD] >> ShredPDisk::RetryPreShredCompactError |83.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/flat_ut.cpp |83.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/ut/ydb-core-blobstorage-vdisk-hulldb-cache_block-ut |83.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/long_tx_service/public/ut/ydb-core-tx-long_tx_service-public-ut |83.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/cancel_tx_ut.cpp |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/repl/ut/unittest >> TBsVDiskRepl1::ReplProxyData [GOOD] >> TBsVDiskRepl1::ReplEraseDiskRestore >> TBsVDiskGC::GCPutKeepIntoEmptyDB [GOOD] >> TBsVDiskGC::GCPutBarrierVDisk0NoSync |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/repl/ut/unittest >> TBlobStorageReplRecoveryMachine::BasicFunctionality [GOOD] |83.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/first_class_src_ids_ut.cpp |83.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_fixture.cpp >> ShredPDisk::RetryPreShredCompactError [GOOD] >> ShredPDisk::RetryShredError >> TBsVDiskExtremeHandoff::SimpleHnd6Put1SeqGetCompaction [GOOD] >> TBsVDiskExtremeHandoff::SimpleHnd2Put1GetFresh >> TQueryResultSizeTrackerTest::CheckAll [GOOD] |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFreshSegment::PerfSkipList [GOOD] |83.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |83.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/topic_service_ut.cpp |83.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/services/persqueue_v1/ut/demo_tx.cpp |83.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_compat_ut.cpp >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizePlusOne >> TBsVDiskExtremeHuge::Simple3Put1SeqGetAllCompaction [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Fresh >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsOkFresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsOkCompaction |83.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |83.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/tools/yqlrun/yqlrun |83.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_ext_blobs_multiple_channels.cpp |83.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility >> ShredPDisk::RetryShredError [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetAllForwardFresh [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetAllForwardCompaction >> TBsVDiskRange::Simple3PutRangeGetNothingBackwardCompaction [GOOD] >> TBsVDiskRange::Simple3PutRangeGetMiddleForwardFresh |83.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/ut/ydb-core-blobstorage-vdisk-hulldb-base-ut >> TBsLocalRecovery::WriteRestartReadHuge [GOOD] >> TBsLocalRecovery::WriteRestartReadHugeIncreased |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::CheckAll [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> ShredPDisk::RetryShredError [GOOD] Test command err: 2025-07-08T11:54:21.337147Z node 1 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:21.337522Z node 1 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 16365717964858284452 MagicNextLogChunkReference: 6759492909326552999 MagicLogChunk: 12502282448481735343 MagicDataChunk: 12073802509809383751 MagicSysLogChunk: 11804108524255070513 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975661319556 (2025-07-08T11:54:21.319556Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:21.338863Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:21.339699Z node 1 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:21.339916Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 
0} PDiskId# 1 2025-07-08T11:54:21.340279Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:21.340417Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1581009 CutLogId# [1:7524678167504347877:2050] ownerRound# 2 PDiskId# 1 2025-07-08T11:54:21.340733Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2025-07-08T11:54:21.341146Z node 1 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:1224} Going to restart PDisk since received TEvAskWardenRestartPDiskResult PDiskId# 1 2025-07-08T11:54:21.341239Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:299} Shutdown OwnerInfo# {{OwnerId: 3 VDiskId: [0:_:0:0:0] ChunkWrites: 0 ChunkReads: 0 LogWrites: 0 LogReader: 0 CurrentFirstLsnToKeep: 0 FirstNonceToKeep: 1581009 StartingPoints: {{TLogRecord Signature# First Data.Size()# 1 Lsn# 1}} Owned chunkIds: {}} PDisk system/log ChunkIds: {0, 1} Free ChunkIds: {2..982} PDiskId# 1 2025-07-08T11:54:21.344564Z node 1 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:21.344763Z node 1 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 16365717964858284452 MagicNextLogChunkReference: 6759492909326552999 MagicLogChunk: 12502282448481735343 MagicDataChunk: 12073802509809383751 MagicSysLogChunk: 11804108524255070513 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975661319556 (2025-07-08T11:54:21.319556Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:21.346535Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1550618 NonceLog# 1581009 NonceData# 1724597} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-07-08T11:54:21.347285Z node 1 :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:711} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2025-07-08T11:54:21.347309Z node 1 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 3 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 12288} PDiskId# 1 2025-07-08T11:54:21.347326Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 12288} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | 
DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:21.347742Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:21.347903Z node 1 :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1817} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 3 PDiskId# 1 2025-07-08T11:54:21.421437Z node 2 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:21.421648Z node 2 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 14597628522968472537 MagicNextLogChunkReference: 5482991785084810603 MagicLogChunk: 12910416719008163541 MagicDataChunk: 163968655834962382 MagicSysLogChunk: 9026466011144484291 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975661403233 (2025-07-08T11:54:21.403233Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:21.422870Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:21.423394Z node 2 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:21.423421Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:21.423678Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:21.423789Z node 2 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [1:_:0:0:0] FirstNonceToKeep# 1432232 CutLogId# [2:7524678171595368692:2050] ownerRound# 4 PDiskId# 1 2025-07-08T11:54:21.424015Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2025-07-08T11:54:21.424362Z node 2 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:1224} Going to restart PDisk since received TEvAskWardenRestartPDiskResult PDiskId# 1 2025-07-08T11:54:21.424435Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:299} Shutdown OwnerInfo# {{OwnerId: 3 VDiskId: [1:_:0:0:0] ChunkWrites: 0 ChunkReads: 0 LogWrites: 0 LogReader: 0 CurrentFirstLsnToKeep: 0 FirstNonceToKeep: 
1432232 StartingPoints: {{TLogRecord Signature# First Data.Size()# 1 Lsn# 1}} Owned chunkIds: {}} PDisk system/log ChunkIds: {0, 1} Free ChunkIds: {2..982} PDiskId# 1 2025-07-08T11:54:21.427663Z node 2 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:21.427837Z node 2 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 14597628522968472537 MagicNextLogChunkReference: 5482991785084810603 MagicLogChunk: 12910416719008163541 MagicDataChunk: 163968655834962382 MagicSysLogChunk: 9026466011144484291 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975661403233 (2025-07-08T11:54:21.403233Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:21.430251Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1245275 NonceLog# 1432232 NonceData# 1127137} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [1:4294967295:0:0:0]} PDiskId# 1 2025-07-08T11:54:21.435016Z node 2 :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl_log.cpp:485} Incompatible version ErrorReason# Versions are not compatible neither by common rule nor by provided rule sets, Stored CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 1 Minor: 26 Hotfix: 0 } } Current CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 3 Minor: 1 Hotfix: 0 } } PDiskId# 1 2025-07-08T11:54:21.435078Z node 2 :BS_PDISK ERROR: {BSP01@blobstorage_pdisk_actor.cpp:612} PDiskId# 1 Can't start due to a log processing error! ErrorStr# "Versions are not compatible neither by common rule nor by provided rule sets, Stored CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 1 Minor: 26 Hotfix: 0 } } Current CompatibilityInfo# { Application: "ydb" Version { Year: 23 Major: 3 Minor: 1 Hotfix: 0 } } " PDiskId# 1 2025-07-08T11:54:21.487784Z node 3 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:21.488195Z node 3 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 13914891836786685312 MagicNextLogChunkReference: 2020429036334957554 MagicLogChunk: 14377598404879540908 MagicDataChunk: 3613038399112706338 MagicSysLogChunk: 546237647877377210 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChun ... 
67295:0:0:0] VDiskStatus# logged ShredState# "Compact requested" 2025-07-08T11:54:23.976982Z node 19 :BS_PDISK_SHRED DEBUG: ProcessPreShredCompactVDiskResult at PDisk# 1 ShredGeneration# 1 request# TPreShredCompactVDiskResult { Owner# 3 OwnerRound# 29 Status# OK ShredGeneration# 1 ErrorReason# } 2025-07-08T11:54:23.976995Z node 19 :BS_PDISK_SHRED DEBUG: PDisk# 1 has finished all pre-shred compact VDisk requests ShredGeneration# 1 finishedCount# 1 2025-07-08T11:54:23.977593Z node 19 :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1817} Registered known VDisk VDisk# [10:4294967295:0:0:0] OwnerId# 3 OwnerRound# 30 PDiskId# 1 2025-07-08T11:54:24.046694Z node 19 :BS_PDISK_SHRED DEBUG: PDisk# 1 Is now waiting for VDisks to cut their log, requestsSent# 1 ShredGeneration# 1 2025-07-08T11:54:24.072208Z node 19 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 4} PDiskId# 1 2025-07-08T11:54:24.072523Z node 19 :BS_PDISK_SHRED DEBUG: PDisk# 1 Is now waiting for VDisks to cut their log, requestsSent# 1 ShredGeneration# 1 /home/runner/actions_runner/_work/ydb/ydb/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.h:372 2025-07-08T11:54:24.072873Z node 19 :BS_PDISK_SHRED DEBUG: PDisk# 1 found unshredded free chunk# 1 ShredGeneration# 1 2025-07-08T11:54:24.170547Z node 19 :BS_PDISK_SHRED DEBUG: PDisk# 1 is done shredding chunk ChunkBeingShredded# 1 2025-07-08T11:54:24.170646Z node 19 :BS_PDISK_SHRED DEBUG: PDisk# 1 has finished all shred requests ShredGeneration# 1 finishedCount# 1 2025-07-08T11:54:24.170655Z node 19 :BS_PDISK_SHRED NOTICE: Shred request is finished at PDisk# 1 ShredGeneration# 1 2025-07-08T11:54:24.213862Z node 20 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:24.214066Z node 20 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 544182714153872800 MagicNextLogChunkReference: 5365457549024446925 MagicLogChunk: 2225389719079486632 MagicDataChunk: 10078422574507315754 MagicSysLogChunk: 9008399803781526990 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975664197488 (2025-07-08T11:54:24.197488Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:24.215162Z node 20 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:24.215771Z node 20 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:24.215797Z node 20 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 
OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:24.216137Z node 20 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:24.216251Z node 20 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [11:_:0:0:0] FirstNonceToKeep# 1429071 CutLogId# [20:7524678183044380108:2050] ownerRound# 31 PDiskId# 1 2025-07-08T11:54:24.216509Z node 20 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2025-07-08T11:54:24.217135Z node 20 :BS_PDISK_SHRED CRIT: ProcessShredPDisk with IS_SHRED_ENABLED# false at PDisk# 1 ShredGeneration# 0 request# TShredPDisk { Owner# 0 OwnerRound# 0 ShredGeneration# 1} 2025-07-08T11:54:24.217148Z node 20 :BS_PDISK_SHRED DEBUG: PDisk# 1 sends compact request to VDisk# [11:4294967295:0:0:0] ownerId# 3 request# {EvPreShredCompactVDisk ShredGeneration# 1} 2025-07-08T11:54:24.217157Z node 20 :BS_PDISK_SHRED DEBUG: PDisk# 1 ShredGeneration# 1 is waiting for ownerId# 3 before finishing pre-shred compact VDiskId# [11:4294967295:0:0:0] VDiskStatus# logged ShredState# "Compact requested" 2025-07-08T11:54:24.217203Z node 20 :BS_PDISK_SHRED DEBUG: ProcessPreShredCompactVDiskResult at PDisk# 1 ShredGeneration# 1 request# TPreShredCompactVDiskResult { Owner# 3 OwnerRound# 31 Status# ERROR ShredGeneration# 1 ErrorReason# } 2025-07-08T11:54:24.217209Z node 20 :BS_PDISK_SHRED ERROR: Shred request failed at PDisk# 1 for shredGeneration# 1 because owner# 3 ownerRound# 31 replied with PreShredCompactVDiskResult status# ERROR and ErrorReason# 2025-07-08T11:54:24.217242Z node 20 :BS_PDISK_SHRED CRIT: ProcessShredPDisk with IS_SHRED_ENABLED# false at PDisk# 1 ShredGeneration# 1 request# TShredPDisk { Owner# 0 OwnerRound# 0 ShredGeneration# 1} 2025-07-08T11:54:24.217246Z node 20 :BS_PDISK_SHRED NOTICE: Retrying a failed shred at PDisk# 1 ShredGeneration# 1 request# TShredPDisk { Owner# 0 OwnerRound# 0 ShredGeneration# 1} 2025-07-08T11:54:24.217249Z node 20 :BS_PDISK_SHRED DEBUG: PDisk# 1 sends compact request to VDisk# [11:4294967295:0:0:0] ownerId# 3 request# {EvPreShredCompactVDisk ShredGeneration# 1} 2025-07-08T11:54:24.217252Z node 20 :BS_PDISK_SHRED DEBUG: PDisk# 1 ShredGeneration# 1 is waiting for ownerId# 3 before finishing pre-shred compact VDiskId# [11:4294967295:0:0:0] VDiskStatus# logged ShredState# "Compact requested" /home/runner/actions_runner/_work/ydb/ydb/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.h:372 2025-07-08T11:54:24.217276Z node 20 :BS_PDISK_SHRED DEBUG: ProcessPreShredCompactVDiskResult at PDisk# 1 ShredGeneration# 1 request# TPreShredCompactVDiskResult { Owner# 3 OwnerRound# 31 Status# OK ShredGeneration# 1 ErrorReason# } 2025-07-08T11:54:24.217286Z node 20 :BS_PDISK_SHRED DEBUG: PDisk# 1 has finished all pre-shred compact VDisk requests ShredGeneration# 1 finishedCount# 1 2025-07-08T11:54:24.264162Z node 20 :BS_PDISK_SHRED DEBUG: PDisk# 1 Is now waiting for VDisks to cut their log, requestsSent# 1 ShredGeneration# 1 2025-07-08T11:54:24.264898Z 
node 20 :BS_PDISK_SHRED DEBUG: PDisk# 1 found unshredded free chunk# 1 ShredGeneration# 1 2025-07-08T11:54:24.370963Z node 20 :BS_PDISK_SHRED DEBUG: PDisk# 1 is done shredding chunk ChunkBeingShredded# 1 2025-07-08T11:54:24.371040Z node 20 :BS_PDISK_SHRED DEBUG: PDisk# 1 has finished all shred requests ShredGeneration# 1 finishedCount# 1 2025-07-08T11:54:24.371046Z node 20 :BS_PDISK_SHRED NOTICE: Shred request is finished at PDisk# 1 ShredGeneration# 1 2025-07-08T11:54:24.406304Z node 21 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:24.406465Z node 21 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 16443748889011662519 MagicNextLogChunkReference: 9571811430239036285 MagicLogChunk: 6573670236575844684 MagicDataChunk: 6056437805153164291 MagicSysLogChunk: 15583096102702810537 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975664390584 (2025-07-08T11:54:24.390584Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:24.407677Z node 21 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:24.408228Z node 21 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:24.408252Z node 21 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:24.408546Z node 21 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:24.408634Z node 21 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [12:_:0:0:0] FirstNonceToKeep# 2050158 CutLogId# [21:7524678183263625899:2050] ownerRound# 32 PDiskId# 1 2025-07-08T11:54:24.408880Z node 21 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2025-07-08T11:54:24.409413Z node 21 :BS_PDISK_SHRED CRIT: ProcessShredPDisk with IS_SHRED_ENABLED# false at PDisk# 1 ShredGeneration# 0 request# TShredPDisk { Owner# 0 OwnerRound# 0 ShredGeneration# 1} 2025-07-08T11:54:24.409427Z node 21 :BS_PDISK_SHRED DEBUG: PDisk# 1 sends compact request to VDisk# [12:4294967295:0:0:0] ownerId# 3 request# {EvPreShredCompactVDisk ShredGeneration# 1} 
2025-07-08T11:54:24.409438Z node 21 :BS_PDISK_SHRED DEBUG: PDisk# 1 ShredGeneration# 1 is waiting for ownerId# 3 before finishing pre-shred compact VDiskId# [12:4294967295:0:0:0] VDiskStatus# logged ShredState# "Compact requested" /home/runner/actions_runner/_work/ydb/ydb/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.h:372 2025-07-08T11:54:24.409464Z node 21 :BS_PDISK_SHRED DEBUG: ProcessPreShredCompactVDiskResult at PDisk# 1 ShredGeneration# 1 request# TPreShredCompactVDiskResult { Owner# 3 OwnerRound# 32 Status# OK ShredGeneration# 1 ErrorReason# } 2025-07-08T11:54:24.409471Z node 21 :BS_PDISK_SHRED DEBUG: PDisk# 1 has finished all pre-shred compact VDisk requests ShredGeneration# 1 finishedCount# 1 2025-07-08T11:54:24.459906Z node 21 :BS_PDISK_SHRED DEBUG: PDisk# 1 Is now waiting for VDisks to cut their log, requestsSent# 1 ShredGeneration# 1 2025-07-08T11:54:24.460638Z node 21 :BS_PDISK_SHRED DEBUG: PDisk# 1 found unshredded free chunk# 1 ShredGeneration# 1 2025-07-08T11:54:24.555223Z node 21 :BS_PDISK_SHRED DEBUG: PDisk# 1 is done shredding chunk ChunkBeingShredded# 1 2025-07-08T11:54:24.555303Z node 21 :BS_PDISK_SHRED DEBUG: PDisk# 1 has finished all shred requests ShredGeneration# 1 finishedCount# 1 2025-07-08T11:54:24.555309Z node 21 :BS_PDISK_SHRED NOTICE: Shred request is finished at PDisk# 1 ShredGeneration# 1 >> TBlobStorageHullFresh::SolomonStandCrash [GOOD] >> TBlobStorageHullFreshSegment::IteratorTest >> TBsVDiskGC::GCPutBarrierVDisk0NoSync [GOOD] >> TBsVDiskGC::GCPutBarrierSync >> TBsVDiskExtremeHandoff::SimpleHnd2Put1GetFresh [GOOD] >> TBsVDiskExtremeHandoff::SimpleHnd2Put1GetCompaction >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Fresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Compaction >> TBlobStorageHullFreshSegment::IteratorTest [GOOD] >> TBsDbStat::ChaoticParallelWrite_DbStat >> TBsVDiskRange::Simple3PutRangeGetMiddleForwardFresh [GOOD] |83.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk >> ActorBenchmark::SendActivateReceive1Pool8Threads [GOOD] >> ActorBenchmark::SendActivateReceiveCSV >> TBsVDiskRangeHuge::Simple3PutRangeGetAllForwardCompaction [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetAllBackwardFresh >> TBsVDiskManyPutGet::ManyPutGetWaitCompaction >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsOkCompaction [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorFresh >> TBsVDiskExtreme::Simple3Put1SeqGetAllFresh >> TFreshAppendixTest::IterateBackwardAll [GOOD] >> TFreshAppendixTest::IterateBackwardExcluding [GOOD] |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFreshSegment::IteratorTest [GOOD] |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRange::Simple3PutRangeGetMiddleForwardFresh [GOOD] |83.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_collector.cpp |83.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/actors/helpers/ut/ydb-library-actors-helpers-ut |83.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection >> ActorCoro::PoisonPill [GOOD] >> ActorSystemBenchmark::QuickSortActor |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TFreshAppendixTest::IterateBackwardExcluding [GOOD] |83.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ut_topic_splitmerge.cpp >> 
TBsVDiskExtremeHandoff::SimpleHnd2Put1GetCompaction [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Compaction [GOOD] >> TBsVDiskExtremeHandoffHuge::SimpleHnd2Put1GetCompaction |83.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ut_rtmr_reboots.cpp >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorFresh [GOOD] >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorCompaction >> TBsVDiskRange::Simple3PutRangeGetAllForwardFresh |83.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yql/providers/common/http_gateway/ut/ydb-library-yql-providers-common-http_gateway-ut |83.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut >> TBsVDiskRangeHuge::Simple3PutRangeGetAllBackwardFresh [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetAllBackwardCompaction >> ActorSystemBenchmark::QuickSortActor [GOOD] >> ActorSystemBenchmark::KvActor >> ydb-tests-functional-suite_tests::import_test [GOOD] |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtremeHuge::Simple3Put1SeqGet2Compaction [GOOD] |83.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_discoverer_ut.cpp >> TBsVDiskManyPutGet::ManyPutGetWaitCompaction [GOOD] >> TBsVDiskManyPutGet::ManyPutRangeGetFreshIndexOnly >> TBsVDiskExtreme::Simple3Put1SeqGetAllFresh [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqGetAllCompaction >> TBsVDiskGC::GCPutBarrierSync [GOOD] >> TBsVDiskGC::GCPutKeepBarrierSync |83.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut >> ydb-tests-functional-restarts::import_test [GOOD] >> ActorSystemBenchmark::KvActor [GOOD] >> ActorSystemBenchmark::SumVector [GOOD] >> ActorSystemMon::SerializeEv [GOOD] >> ActorSystemPerformance::PerfTest |83.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/query/ut/ydb-core-blobstorage-vdisk-query-ut |83.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/ut/ydb-core-fq-libs-db_id_async_resolver_impl-ut |83.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |83.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/import_test >> ydb-tests-functional-suite_tests::import_test [GOOD] >> TPDiskTest::TestRealFile [GOOD] >> TPDiskTest::TestSIGSEGVInTUndelivered [GOOD] >> TPDiskTest::TestPDiskOnDifferentKeys >> TBsVDiskRepl3::SyncLogTest |83.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/actors/core/ut/ydb-library-actors-core-ut |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/query/ut/unittest >> TPDiskTest::TestPDiskOnDifferentKeys [GOOD] >> TPDiskTest::WrongPDiskKey >> TBsVDiskGC::TGCManyVPutsDelTabletTest >> TBsVDiskRange::Simple3PutRangeGetAllForwardFresh [GOOD] >> TBsVDiskRange::Simple3PutRangeGetAllForwardCompaction >> TPDiskTest::WrongPDiskKey [GOOD] >> TPDiskUtil::AtomicBlockCounterFunctional [GOOD] >> TPDiskUtil::AtomicBlockCounterSeqno [GOOD] >> TPDiskUtil::Light [GOOD] >> TPDiskUtil::LightOverflow [GOOD] >> TPDiskUtil::DriveEstimator |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |83.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |83.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/restarts/import_test >> 
ydb-tests-functional-restarts::import_test [GOOD] |83.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest >> TBsVDiskExtremeHandoffHuge::SimpleHnd2Put1GetCompaction [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetAllBackwardCompaction [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardCompaction >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorCompaction [GOOD] |83.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_column_build/ut_column_build.cpp |83.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_vdisk.cpp |83.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_aggregation/ut_aggregate_statistics.cpp |83.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtremeHandoffHuge::SimpleHnd2Put1GetCompaction [GOOD] >> TBsVDiskManyPutGet::ManyPutRangeGetFreshIndexOnly [GOOD] >> TBsVDiskManyPutGet::ManyPutRangeGetCompactionIndexOnly >> TBsVDiskOutOfSpace::WriteUntilOrangeZone [GOOD] >> TBsVDiskOutOfSpace::WriteUntilYellowZone |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtremeHuge::Simple3Put1SeqSubsErrorCompaction [GOOD] |83.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ncloud/impl/access_service_ut.cpp |83.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/yt/actors/ut/ydb-library-yql-providers-yt-actors-ut >> TBsVDiskExtreme::Simple3Put1SeqGetAllCompaction [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqGet2Fresh |83.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/ydb-core-blobstorage-vdisk-hulldb-barriers-ut >> SchedulerActor::QuickEvents >> TestAliases::AliasEventDelivery [GOOD] >> TestAliases::AliasEventDeliveryInTestRuntime [GOOD] >> TestDecorator::Basic [GOOD] >> TestDecorator::LocalProcessKey [GOOD] >> TestStateFunc::StateFuncWithExceptions [GOOD] >> WaitingBenchs::SpinPause [GOOD] >> WaitingBenchs::WakingUpTest >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingBackwardFresh >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizeMinusOne [GOOD] |83.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/dq/scheduler/ut/ydb-library-yql-providers-dq-scheduler-ut |83.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/actors/interconnect/ut/ydb-library-actors-interconnect-ut |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizeMinusOne [GOOD] |83.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/replica_ut.cpp >> TBsVDiskExtreme::Simple3Put3GetFresh >> TBsLocalRecovery::WriteRestartReadHugeIncreased [GOOD] >> TBsVDiskRange::Simple3PutRangeGetAllForwardCompaction [GOOD] >> TBsLocalRecovery::WriteRestartReadHugeDecreased >> TBsVDiskRange::Simple3PutRangeGetMiddleForwardCompaction |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/query/ut/unittest >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardCompaction [GOOD] |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |83.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut |83.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/base/ut/ydb-core-blobstorage-base-ut >> 
TBsVDiskRepl3::SyncLogTest [GOOD] >> THugeMigration::ExtendMap_HugeBlobs |83.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |83.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table/ut_external_table.cpp |83.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/tools/dqrun/dqrun |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardCompaction [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqGet2Fresh [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqGet2Compaction >> TBsVDiskBadBlobId::PutBlobWithBadId |83.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest |83.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_login_large/ut_login_large.cpp |83.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dqrun/dqrun |83.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut |83.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/yt/actors/ut/ydb-library-yql-providers-yt-actors-ut >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingBackwardFresh [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingBackwardCompaction >> TPDiskUtil::SectorRestoratorOldNewHash [GOOD] >> TPDiskUtil::TChunkIdFormatter [GOOD] >> TPDiskUtil::TOwnerPrintTest [GOOD] >> TPDiskUtil::TChunkStateEnumPrintTest [GOOD] >> TPDiskUtil::TIoResultEnumPrintTest [GOOD] >> TPDiskUtil::TIoTypeEnumPrintTest [GOOD] >> TPDiskUtil::TestNVMeSerial [GOOD] >> TPDiskUtil::TestDeviceList [GOOD] >> TPDiskUtil::TestBufferPool |83.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blob_depot/ut/ydb-core-blob_depot-ut >> TBsVDiskExtreme::Simple3Put3GetFresh [GOOD] >> TBsVDiskExtreme::Simple3Put3GetCompaction >> TBsVDiskRange::Simple3PutRangeGetMiddleForwardCompaction [GOOD] >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardFresh |83.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tx/long_tx_service/public/ut/ydb-core-tx-long_tx_service-public-ut >> TYardTest::TestInit >> TPDiskTest::TestAbstractPDiskInterface [GOOD] >> TYardTest::TestBadDeviceInit >> TPDiskTest::TestPDiskActorErrorState [GOOD] >> TPDiskTest::TestPDiskActorPDiskStopStart >> TYardTest::TestEmptyLogRead |83.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut >> TPDiskTest::TestPDiskActorPDiskStopStart [GOOD] >> TPDiskTest::TestPDiskActorPDiskStopBroken >> TYardTest::TestBadDeviceInit [GOOD] >> TYardTest::TestChunkReadRandomOffset >> TYardTest::TestEmptyLogRead [GOOD] >> TYardTest::TestChunkWriteRead >> TBsVDiskBadBlobId::PutBlobWithBadId [GOOD] >> TBsVDiskBrokenPDisk::WriteUntilDeviceDeath >> TPDiskTest::TestPDiskActorPDiskStopBroken [GOOD] >> TPDiskTest::TestPDiskActorPDiskStopUninitialized [GOOD] >> TPDiskTest::TestChunkWriteRelease >> TQueryResultSizeTrackerTest::CheckOnlyQueryResult [GOOD] |83.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |83.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/datastreams/datastreams_ut.cpp |83.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut >> TBsVDiskExtreme::Simple3Put1SeqGet2Compaction [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingPartFresh >> TYardTest::TestInit [GOOD] >> TYardTest::TestInitOnIncompleteFormat >> TBsVDiskRangeHuge::Simple3PutRangeGetNothingBackwardCompaction [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleForwardFresh |83.7%| [LD] 
{BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/pq_read |83.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardFresh [GOOD] >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardCompaction |83.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_actors_ut.cpp |83.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_tables_ut.cpp |83.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp >> TYardTest::TestChunkWriteRead [GOOD] >> TYardTest::TestChunkWriteReadWithHddSectorMap >> TYardTest::TestInitOnIncompleteFormat [GOOD] >> THugeMigration::ExtendMap_HugeBlobs [GOOD] >> TYardTest::TestInitOwner >> THugeMigration::ExtendMap_SmallBlobsBecameHuge |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::CheckOnlyQueryResult [GOOD] >> TBsVDiskExtreme::Simple3Put3GetCompaction [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsOkFresh >> TBsVDiskRepl2::ReplEraseDiskRestoreWOOneDisk [GOOD] >> TBsVDiskRepl3::ReplEraseDiskRestoreMultipart |83.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan >> TBlobStoragePDiskCrypto::TestMixedStreamCypher [GOOD] |83.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest >> TBlobStoragePDiskCrypto::TestInplaceStreamCypher [GOOD] >> TBlockDeviceTest::TestDeviceWithSubmitGetThread >> TYardTest::TestInitOwner [GOOD] >> TYardTest::TestIncorrectRequests >> TBsVDiskBrokenPDisk::WriteUntilDeviceDeath [GOOD] >> TBsVDiskDefrag::DefragEmptyDB >> TBsVDiskExtreme::Simple3Put1GetMissingPartFresh [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingPartCompaction >> TPDiskTest::TestChunkWriteRelease [GOOD] >> TPDiskTest::TestPDiskManyOwnersInitiation >> TBlockDeviceTest::TestDeviceWithSubmitGetThread [GOOD] >> TBlockDeviceTest::TestWriteSectorMapAllTypes >> TYardTest::TestIncorrectRequests [GOOD] >> TYardTest::TestLogWriteRead |83.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/mvp/meta/ut/ydb-mvp-meta-ut >> TPDiskUtil::TestBufferPool [GOOD] >> TSectorMapPerformance::TestHDD1960GBRead100MBOnFirstSector >> TPDiskTest::TestPDiskManyOwnersInitiation [GOOD] >> TPDiskTest::TestLogWriteReadWithRestarts >> TYardTest::TestLogWriteRead [GOOD] >> TYardTest::TestLogWriteReadMedium >> TYardTest::TestWholeLogRead >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleForwardFresh [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleForwardCompaction |83.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut >> TYardTest::TestLogWriteReadMedium [GOOD] >> TYardTest::TestLogWriteReadMediumWithHddSectorMap >> TYardTest::TestWholeLogRead [GOOD] >> TYardTest::TestSysLogReordering >> TYardTest::TestChunkWriteReadWithHddSectorMap [GOOD] >> TYardTest::TestChunkWriteReadMultiple |83.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest >> TPDiskRaces::KillOwnerWhileDeletingChunk >> TBlobStorageHullFresh::AppendixPerf [GOOD] >> TBlobStorageHullFresh::AppendixPerf_Tune |83.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/dq/runtime/ut/ydb-library-yql-providers-dq-runtime-ut |83.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/metering/ut/ydb-core-metering-ut >> 
TBsVDiskRange::Simple3PutRangeGetMiddleBackwardCompaction [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsOkFresh [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsOkCompaction >> TIncrHugeBlobIdDict::Basic [GOOD] >> TYardTest::TestLogWriteReadMediumWithHddSectorMap [GOOD] >> TYardTest::TestLogWriteReadLarge |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/incrhuge/ut/unittest >> TBsVDiskDefrag::DefragEmptyDB [GOOD] >> TBsVDiskDefrag::Defrag50PercentGarbage >> TBsLocalRecovery::WriteRestartReadHugeDecreased [GOOD] >> TBsOther1::PoisonPill |83.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless_reboots/ut_serverless_reboots.cpp |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/incrhuge/ut/unittest >> TIncrHugeBasicTest::WriteReadDeleteEnum [GOOD] >> TPDiskTest::TestLogWriteReadWithRestarts [GOOD] >> TPDiskTest::TestLogSpliceNonceJump >> TYardTest::TestLogWriteReadLarge [GOOD] >> TYardTest::TestLogWriteCutEqual >> TBsVDiskExtreme::Simple3Put1GetMissingPartCompaction [GOOD] |83.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/topic_reader_ut.cpp |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRange::Simple3PutRangeGetMiddleBackwardCompaction [GOOD] >> TActorSystemTest::LocalService [GOOD] >> TEventProtoWithPayload::SerializeDeserialize >> TBlobStorageHullFresh::AppendixPerf_Tune [GOOD] >> TIncrHugeBasicTest::Recovery [GOOD] |83.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_write.cpp |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/incrhuge/ut/unittest >> TIncrHugeBlobIdDict::Basic [GOOD] |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/incrhuge/ut/unittest >> THugeMigration::ExtendMap_SmallBlobsBecameHuge [GOOD] >> THugeMigration::RollbackMap_HugeBlobs >> TBsVDiskRepl1::ReplEraseDiskRestore [GOOD] >> TBsVDiskRepl1::ReadOnly |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/incrhuge/ut/unittest >> TIncrHugeBasicTest::WriteReadDeleteEnum [GOOD] |83.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |83.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_pq_reboots/ut_pq_reboots.cpp >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleForwardCompaction [GOOD] >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardFresh >> TPDiskTest::TestLogSpliceNonceJump [GOOD] >> TPDiskTest::TestMultipleLogSpliceNonceJump |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtreme::Simple3Put1GetMissingPartCompaction [GOOD] |83.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/incrhuge/ut/unittest >> TIncrHugeBasicTest::Recovery [GOOD] >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizePlusOne [GOOD] >> TBlockDeviceTest::TestWriteSectorMapAllTypes [GOOD] >> TBlockDeviceTest::WriteReadRestart |83.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |83.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/fresh/ut/unittest >> TBlobStorageHullFresh::AppendixPerf_Tune [GOOD] |83.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/ut/helpers/libmkql_proto-ut-helpers.a |83.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |83.4%| 
[LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/s3/object_listers/ut/ydb-library-yql-providers-s3-object_listers-ut |83.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut >> TSectorMapPerformance::TestHDD1960GBRead100MBOnFirstSector [GOOD] >> TSectorMapPerformance::TestHDD1960GBRead100MBOnLastSector |83.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |83.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/mvp/core/ut/ydb-mvp-core-ut |83.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSizePlusOne [GOOD] >> TPDiskTest::TestMultipleLogSpliceNonceJump [GOOD] >> TPDiskTest::TestFakeErrorPDiskManyLogWrite >> TBsVDiskExtreme::Simple3Put1SeqSubsOkCompaction [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorFresh >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardFresh [GOOD] |83.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/access_service_ut.cpp >> ChangingThreadsCountInBasicExecutorPool::DecreaseIncreaseThreadCount [GOOD] >> ChangingThreadsCountInBasicExecutorPool::ContiniousChangingThreadCount >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSize >> TBsVDiskRepl1::ReadOnly [GOOD] |83.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut |83.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |83.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal >> TPDiskTest::TestFakeErrorPDiskManyLogWrite [GOOD] >> TPDiskTest::TestFakeErrorPDiskLogRead >> TBlobStorageCompStrat::Test1 >> TBlobStorageBarriersTreeTest::Tree [GOOD] |83.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yaml_config/ut/ydb-library-yaml_config-ut |83.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |83.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRangeHuge::Simple3PutRangeGetMiddleBackwardFresh [GOOD] |83.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/service_account_service_ut.cpp >> TEventProtoWithPayload::SerializeDeserialize [GOOD] >> TPDiskTest::TestFakeErrorPDiskLogRead [GOOD] >> TBlobStorageBarriersTreeTest::MemViewSnapshots [GOOD] >> TEventProtoWithPayload::SerializeDeserializeArena >> TPDiskTest::TestFakeErrorPDiskSysLogRead [GOOD] >> TPDiskTest::TestFakeErrorPDiskManyChunkRead >> TArrowPushDown::SimplePushDown [GOOD] >> TArrowPushDown::MatchSeveralRowGroups [GOOD] >> TArrowPushDown::FilterEverything [GOOD] >> ConfigValidation::StaticGroupSizesGrow [GOOD] >> ConfigValidation::TooManyVDiskChanged [GOOD] >> DatabaseConfigValidation::AllowedFields >> ConfigValidation::StaticGroupSizesShrink [GOOD] >> ConfigValidation::VDiskChanged [GOOD] >> ConfigValidation::SameStaticGroup [GOOD] |83.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/folder_service_ut.cpp ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRepl1::ReadOnly [GOOD] Test command err: 2025-07-08T11:54:29.104939Z :BS_SYNCER ERROR: VDISK[0:_:0:1:1]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-07-08T11:54:29.150878Z :BS_SYNCER ERROR: VDISK[0:_:0:1:1]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 12781900461510216032] 2025-07-08T11:54:30.192561Z :BS_SYNCER 
ERROR: VDISK[0:_:0:1:1]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 >> SchedulerActor::QuickEvents [GOOD] >> SharedThreads::RegistrationAndPassingAwayActorsCommon >> MdbEndpoingGenerator::Generic_NoTransformHost [GOOD] >> MdbEndpoingGenerator::Legacy [GOOD] >> MdbEndpoingGenerator::Generic_WithTransformHost [GOOD] >> TBlobStorageCompStrat::Test1 [GOOD] >> DatabaseConfigValidation::AllowedFields [GOOD] >> DatabaseConfigValidation::NotAllowedFields [GOOD] |83.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker >> LongTxServicePublicTypes::SnapshotMaxTxId [GOOD] >> LongTxServicePublicTypes::Snapshot [GOOD] >> LongTxServicePublicTypes::SnapshotReadOnly [GOOD] >> LongTxServicePublicTypes::LongTxId [GOOD] >> TEventProtoWithPayload::SerializeDeserializeArena [GOOD] |83.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> TEventProtoWithPayload::SerializeDeserializeArenaBig |83.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/query/ut/unittest |83.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/user_account_service_ut.cpp |83.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> TBlobStorageBarriersTreeTest::Tree [GOOD] |83.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |83.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/barriers/ut/unittest >> TBlobStorageBarriersTreeTest::MemViewSnapshots [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorFresh [GOOD] >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorCompaction >> TestUrlBuilder::UriOnly [GOOD] >> TestS3UrlEscape::EscapeAdditionalSymbols [GOOD] >> TestS3UrlEscape::EscapeEscapedForce [GOOD] >> TestUrlBuilder::BasicWithEncoding [GOOD] >> TestUrlBuilder::Basic [GOOD] >> TestS3UrlEscape::EscapeUnescapeForceRet [GOOD] >> TestUrlBuilder::BasicWithAdditionalEncoding [GOOD] >> TSchedulerTest::Use75PercentForLargeInNonOverload [GOOD] >> TSchedulerTest::FifoAfterOneHour [GOOD] >> TSchedulerTest::UseOnlyHalfForLargeInOverload [GOOD] >> TSchedulerTest::DoNotReserveForSmall [GOOD] >> TSchedulerTest::SimpleFifo [GOOD] >> TSchedulerTest::NewbieFirst [GOOD] >> TSchedulerTest::ReserveForSmall [GOOD] >> TSchedulerTest::HalfWorkersForSmall [GOOD] >> TSchedulerTest::OneUserForCluster [GOOD] |83.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |83.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/s3/actors/ut/unittest >> TArrowPushDown::FilterEverything [GOOD] >> THugeMigration::RollbackMap_HugeBlobs [GOOD] >> TMonitoring::ReregisterTest >> TEventProtoWithPayload::SerializeDeserializeArenaBig [GOOD] >> TEventProtoWithPayload::Compatibility [GOOD] >> TEventProtoWithPayload::PreSerializedCompatibility [GOOD] >> TEventSerialization::Coroutine [GOOD] |83.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/validation/ut/unittest >> DatabaseConfigValidation::NotAllowedFields [GOOD] |83.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/unittest >> TBlobStorageCompStrat::Test1 [GOOD] >> TMonitoring::ReregisterTest [GOOD] |83.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/db_id_async_resolver_impl/ut/unittest >> MdbEndpoingGenerator::Generic_WithTransformHost [GOOD] >> 
ChangingThreadsCountInBasicExecutorPool::ContiniousChangingThreadCount [GOOD] >> LockFreeMailbox::Basics [GOOD] >> LockFreeMailbox::RegisterActors [GOOD] >> LockFreeMailbox::RegisterAliases [GOOD] >> LockFreeMailbox::MultiThreadedPushPop |83.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/long_tx_service/public/ut/unittest >> LongTxServicePublicTypes::LongTxId [GOOD] |83.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yql/providers/dq/scheduler/ut/ydb-library-yql-providers-dq-scheduler-ut |83.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors >> TPDiskTest::TestFakeErrorPDiskManyChunkRead [GOOD] >> TPDiskTest::TestFakeErrorPDiskManyChunkWrite >> ArrowInferenceTest::csv_simple [GOOD] >> LockFreeMailbox::MultiThreadedPushPop [GOOD] >> ArrowInferenceTest::tsv_simple [GOOD] >> SchedulerActor::LongEvents |83.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/actors/core/ut/unittest >> TEventSerialization::Coroutine [GOOD] >> TYardTest::TestChunkWriteReadMultiple [GOOD] >> TYardTest::TestChunkWriteReadMultipleWithHddSectorMap >> Mvp::TokenatorGetMetadataTokenGood ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TMonitoring::ReregisterTest [GOOD] Test command err: RUN TEST SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration SendData iteration |83.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/dq/scheduler/ut/unittest >> TSchedulerTest::OneUserForCluster [GOOD] |83.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |83.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |83.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/metering/ut/ydb-core-metering-ut |83.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_transfer/ut_transfer.cpp >> Mvp::TokenatorGetMetadataTokenGood [GOOD] >> Mvp::TokenatorRefreshMetadataTokenGood >> TPathTests::NormalizeSlashes [GOOD] >> TPathTests::NormalizeNoSlashes [GOOD] >> TPathTests::NormalizeEmpty [GOOD] >> TPathTests::TestRegexFromWildcards [GOOD] >> TPathTests::NormalizeWithSlashes [GOOD] >> ActorSystemPerformance::PerfTest [GOOD] >> AskActor::Err [GOOD] >> TPDiskTest::TestFakeErrorPDiskManyChunkWrite [GOOD] >> TPDiskTest::TestLogSpliceChunkReserve >> ArrowTest::Basic [GOOD] >> SizeCalcer::SimpleStrings [GOOD] >> SizeCalcer::DictionaryStrings [GOOD] >> SizeCalcer::ZeroSimpleStrings [GOOD] >> SizeCalcer::ZeroDictionaryStrings [GOOD] >> SizeCalcer::SimpleInt64 [GOOD] >> SizeCalcer::SimpleTimestamp [GOOD] >> SizeCalcer::NestedFixedSizeList [GOOD] >> SizeCalcer::NestedList |83.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |83.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/s3/common/ut/unittest >> TestUrlBuilder::BasicWithAdditionalEncoding [GOOD] >> ClosedIntervalSet::Union >> SizeCalcer::NestedList [GOOD] >> SizeCalcer::NestedStruct [GOOD] >> SizeCalcer::NestedSparseUnion [GOOD] >> SplitterTest::SimpleNoFrac [GOOD] >> SplitterTest::Simple [GOOD] >> SplitterTest::SimpleBig [GOOD] >> SplitterTest::Generic >> ConsoleDumper::Basic [GOOD] >> ConsoleDumper::CoupleMerge [GOOD] >> ConsoleDumper::CoupleOverwrite [GOOD] >> ConsoleDumper::CoupleMergeOverwriteRepeated [GOOD] >> ConsoleDumper::ReverseMerge 
[GOOD] >> ConsoleDumper::ReverseOverwrite [GOOD] >> ConsoleDumper::ReverseMergeOverwriteRepeated [GOOD] >> ConsoleDumper::Different [GOOD] >> ConsoleDumper::SimpleNode [GOOD] >> ConsoleDumper::JoinSimilar [GOOD] >> ConsoleDumper::DontJoinDifferent [GOOD] >> ConsoleDumper::SimpleTenant |83.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap_reboots/ut_olap_reboots.cpp |83.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/formats/arrow/ut/ydb-library-formats-arrow-ut |83.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/external_sources/object_storage/inference/ut/gtest >> ArrowInferenceTest::tsv_simple [GOOD] |83.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/ut/ut_object.cpp >> ConsoleDumper::SimpleTenant [GOOD] >> ConsoleDumper::SimpleNodeTenant [GOOD] >> ConsoleDumper::SimpleHostId [GOOD] >> ConsoleDumper::SimpleNodeId [GOOD] >> ConsoleDumper::DontJoinNodeTenant [GOOD] >> ConsoleDumper::JoinMultipleSimple [GOOD] >> ConsoleDumper::MergeNode [GOOD] >> ConsoleDumper::MergeOverwriteRepeatedNode [GOOD] >> ConsoleDumper::Ordering [GOOD] >> ConsoleDumper::IgnoreUnmanagedItems [GOOD] >> YamlConfig::CollectLabels [GOOD] >> YamlConfig::MaterializeSpecificConfig [GOOD] >> YamlConfig::MaterializeAllConfigSimple [GOOD] >> YamlConfig::MaterializeAllConfigs [GOOD] >> YamlConfig::AppendVolatileConfig [GOOD] >> YamlConfig::AppendAndResolve [GOOD] >> YamlConfig::GetMetadata [GOOD] >> YamlConfig::ReplaceMetadata [GOOD] >> YamlConfigParser::Iterate [GOOD] >> YamlConfigParser::ProtoBytesFieldDoesNotDecodeBase64 >> TestFileCache::Add [GOOD] >> TestFileCache::Find [GOOD] >> TestFileCache::Create [GOOD] >> TestFileCache::Acquire [GOOD] >> TestFileCache::AcquireSingleFile2Times [GOOD] >> TestFileCache::ContainsReleased [GOOD] >> TestFileCache::AcquireRelease [GOOD] >> TestFileCache::AddAfterRemoveAcquired [GOOD] >> TestFileCache::Evict [GOOD] >> YamlConfigParser::ProtoBytesFieldDoesNotDecodeBase64 [GOOD] >> YamlConfigParser::PdiskCategoryFromString [GOOD] >> YamlConfigParser::AllowDefaultHostConfigId [GOOD] >> YamlConfigParser::IncorrectHostConfigIdFails [GOOD] >> YamlConfigParser::NoMixedHostConfigIds [GOOD] >> YamlConfigProto2Yaml::StorageConfig [GOOD] >> ClosedIntervalSet::Union [GOOD] >> ClosedIntervalSet::Difference >> TBsVDiskRepl3::ReplEraseDiskRestoreMultipart [GOOD] >> TYardTest::TestChunkReadRandomOffset [GOOD] >> TBsVDiskRepl3::AnubisTest [GOOD] >> TYardTest::TestChunkWrite20Read02 >> TBsVDiskRepl3::ReplPerf |83.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |83.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/apps/ydb/ydb ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/actors/core/ut/unittest >> AskActor::Err [GOOD] Test command err: vector size: 100, threads: 1 actor sort duration: 27us thread pool sort duration: 64us vector size: 100, threads: 4 actor sort duration: 84us thread pool sort duration: 64us ----- vector size: 1000, threads: 1 actor sort duration: 51us thread pool sort duration: 184us vector size: 1000, threads: 4 actor sort duration: 84us thread pool sort duration: 61us ----- vector size: 1000000, threads: 1 actor sort duration: 25767us thread pool sort duration: 24326us vector size: 1000000, threads: 4 actor sort duration: 39242us thread pool sort duration: 38020us ----- requestsNumber: 10000, dictSize: 1000, threads: 1 kv search threadpool duration: 8238us kv search single thread duration: 378us ---- requestsNumber: 10000, dictsNumber: 1, dictSize: 1000, threads: 1, 
actors: 1 kv search actor duration: 4099us kv search actor with external message sender duration: 5497us ---- requestsNumber: 10000, dictsNumber: 1, dictSize: 1000, threads: 1, actors: 8 kv search actor duration: 7310us kv search actor with external message sender duration: 7355us ---- vector size: 1000, threads: 1, proxy actors: 1, sum actors: 1 duration: 4us vector size: 1000000, threads: 1, proxy actors: 1, sum actors: 1 duration: 2381us DURATION_OLD: 303.8232ns DURATION_NEW: 347.0403ns DURATION_SIMPLE: 4.982ns |83.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan >> SplitterTest::Generic [GOOD] >> TYardTest::TestChunkWrite20Read02 [GOOD] >> TYardTest::TestChunkContinuity2 >> test.py::test[solomon-BadDownsamplingAggregation-] |83.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/ut/ut_tiers.cpp |83.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/s3/object_listers/ut/unittest >> TPathTests::NormalizeWithSlashes [GOOD] |83.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |83.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp.cpp |83.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/ydb-core-blobstorage-vdisk-hulldb-fresh-ut |83.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_stream_lookup.cpp >> TSectorMapPerformance::TestHDD1960GBRead100MBOnLastSector [GOOD] >> TSectorMapPerformance::TestHDD1960GBWrite100MBOnFirstSector >> ColumnShardConfigValidation::AcceptDefaultCompression [GOOD] >> ColumnShardConfigValidation::NotAcceptDefaultCompression [GOOD] >> ColumnShardConfigValidation::CorrectPlainCompression [GOOD] >> ColumnShardConfigValidation::NotCorrectPlainCompression [GOOD] >> ColumnShardConfigValidation::CorrectLZ4Compression [GOOD] >> ColumnShardConfigValidation::NotCorrectLZ4Compression [GOOD] >> ColumnShardConfigValidation::CorrectZSTDCompression [GOOD] >> ColumnShardConfigValidation::NotCorrectZSTDCompression [GOOD] >> ChannelScheduler::PriorityTraffic [GOOD] >> DynamicProxy::RaceCheck1 |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb |83.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yql/providers/s3/object_listers/ut/ydb-library-yql-providers-s3-object_listers-ut |83.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorCompaction [GOOD] >> TYardTest::TestChunkContinuity2 [GOOD] >> TYardTest::TestChunkContinuity3000 |83.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yql/providers/dq/runtime/ut/ydb-library-yql-providers-dq-runtime-ut |83.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |83.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/dq/runtime/ut/unittest >> TestFileCache::Evict [GOOD] |83.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap/ut_olap.cpp >> TYardTest::TestChunkContinuity3000 [GOOD] >> TYardTest::TestChunkContinuity9000 |83.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/actors/dnsresolver/ut/ydb-library-actors-dnsresolver-ut |83.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut >> TYardTest::TestChunkContinuity9000 [GOOD] >> BufferWithGaps::Basic [GOOD] >> TYardTest::TestChunkLock >> TBatchedVecTest::TestToStringInt [GOOD] >> BufferWithGaps::IsReadable [GOOD] >> TBatchedVecTest::TestOutputTOutputType [GOOD] >> PtrTest::Test1 [GOOD] >> 
TSectorMapPerformance::TestHDD1960GBWrite100MBOnFirstSector [GOOD] >> TSectorMapPerformance::TestHDD1960GBWrite100MBOnLastSector >> TBsOther1::PoisonPill [GOOD] >> TBsOther1::ChaoticParallelWrite |83.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/ydb-core-blobstorage-vdisk-hulldb-compstrat-ut >> TYardTest::TestChunkLock [GOOD] >> TYardTest::TestChunkUnlock >> TStreamRequestUnitsCalculatorTest::Basic [GOOD] >> TTimeGridTest::TimeGrid [GOOD] >> MetaCache::BasicForwarding [GOOD] |83.1%| [BN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/tool/ydb_cli ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/ut/unittest >> YamlConfigProto2Yaml::StorageConfig [GOOD] Test command err: host_config: "[{\"drive\":[{\"type\":\"NVME\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_01\"},{\"type\":\"NVME\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_02\"}],\"host_config_id\":1},{\"drive\":[{\"type\":\"SSD\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_01\"}],\"host_config_id\":2}]" "\/dev\/disk\/by-partlabel\/kikimr_nvme_02" host_config: "[{\"drive\":[{\"type\":\"NVME\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_01\"},{\"type\":\"NVME\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_02\"}],\"host_config_id\":1},{\"drive\":[{\"type\":\"SSD\",\"path\":\"\\/dev\\/disk\\/by-partlabel\\/kikimr_nvme_01\"}],\"host_config_id\":2}]" host_configs: - host_config_id: 1 drive: - path: /dev/disk/by-partlabel/kikimr_nvme_01 type: NVME expected_slot_count: 9 - path: /dev/disk/by-partlabel/kikimr_nvme_02 type: NVME expected_slot_count: 9 - host_config_id: 2 drive: - path: /dev/disk/by-partlabel/kikimr_nvme_01 type: SSD expected_slot_count: 9 hosts: - host: sas8-6954.search.yandex.net port: 19000 host_config_id: 1 - host: sas8-6955.search.yandex.net port: 19000 host_config_id: 2 item_config_generation: 0 >> MetaCache::TimeoutFallback >> MetaCache::TimeoutFallback [GOOD] |83.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtreme::Simple3Put1SeqSubsErrorCompaction [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/formats/arrow/ut/unittest >> SplitterTest::Generic [GOOD] Test command err: 1056768 528384 264192 6176 8192 2052 16384 16384 3,3,4 33333,33333,33334 10231,10231,10231,10231,10231,10231,10231,10231,10231,10231,10231,10232,10232,10232 >> TYardTest::TestChunkUnlock [GOOD] >> TYardTest::TestChunkUnlockHarakiri >> TCollectingS3ListingStrategyTests::IfNoIssuesOccursShouldReturnCollectedPaths [GOOD] >> YtLookupActor::Lookup >> TCollectingS3ListingStrategyTests::IfThereAreMoreRecordsThanSpecifiedByLimitShouldReturnError [GOOD] >> TCollectingS3ListingStrategyTests::IfAnyIterationReturnIssueThanWholeStrategyShouldReturnIt [GOOD] >> TCollectingS3ListingStrategyTests::IfExceptionIsReturnedFromIteratorThanItShouldCovertItToIssue [GOOD] >> TYardTest::TestChunkUnlockHarakiri [GOOD] >> TYardTest::TestChunkUnlockRestart |83.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_table_split_ut.cpp |83.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/validation/column_shard_config_validator_ut/unittest >> ColumnShardConfigValidation::NotCorrectZSTDCompression [GOOD] >> TBtreeIndexTPartLarge::SmallKeys1GB >> TYardTest::TestSysLogReordering [GOOD] >> TYardTest::TestStartingPoints |83.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/pgwire/pgwire |83.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |83.0%| 
[LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |83.0%| [BN] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/ydb_cli >> YtLookupActor::Lookup [GOOD] >> TYardTest::TestChunkUnlockRestart [GOOD] >> TYardTest::TestChunkReserve |83.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |83.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots >> TYardTest::TestChunkReserve [GOOD] >> TYardTest::TestCheckSpace >> TBlockDeviceTest::WriteReadRestart [GOOD] >> TColorLimitsTest::Colors [GOOD] >> TColorLimitsTest::OwnerFreeSpaceShare [GOOD] >> TLogCache::Simple [GOOD] >> TLogCache::EraseRangeOnEmpty [GOOD] >> TLogCache::EraseRangeOutsideOfData [GOOD] >> TLogCache::EraseRangeSingleMinElement [GOOD] >> TLogCache::EraseRangeSingleMidElement [GOOD] >> TLogCache::EraseRangeSingleMaxElement [GOOD] >> TLogCache::EraseRangeSample [GOOD] >> TLogCache::EraseRangeAllExact [GOOD] >> TLogCache::EraseRangeAllAmple [GOOD] >> TPDiskRaces::Decommit >> TYardTest::TestCheckSpace [GOOD] >> TYardTest::TestStartingPoints [GOOD] >> TYardTest::TestWhiteboard >> TYardTest::TestBootingState >> test.py::test[solomon-BadDownsamplingAggregation-] [GOOD] >> test.py::test[solomon-BadDownsamplingDisabled-] |82.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/base/ut/gtest >> PtrTest::Test1 [GOOD] |82.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/metering/ut/unittest >> TTimeGridTest::TimeGrid [GOOD] |82.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica >> ClosedIntervalSet::Difference [GOOD] >> ClosedIntervalSet::Contains [GOOD] >> ClosedIntervalSet::EnumInRange |82.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/actors/core/harmonizer/ut/ydb-library-actors-core-harmonizer-ut |82.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot.cpp >> CachingDnsResolver::UnusableResolver [GOOD] >> CachingDnsResolver::ResolveCaching >> TYardTest::TestBootingState [GOOD] >> TPDiskTest::TestLogSpliceChunkReserve [GOOD] >> ClosedIntervalSet::EnumInRange [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/mvp/meta/ut/unittest >> MetaCache::TimeoutFallback [GOOD] >> ClosedIntervalSet::EnumInRangeReverse Test command err: 2025-07-08T11:54:36.242328Z :HTTP INFO: Listening on http://[::]:12437 2025-07-08T11:54:36.242408Z :HTTP INFO: Listening on http://[::]:14529 2025-07-08T11:54:36.242462Z :HTTP DEBUG: Connection created [1:14:2061] 2025-07-08T11:54:36.242470Z :HTTP DEBUG: resolving 127.0.0.1:12437 2025-07-08T11:54:36.242486Z :HTTP DEBUG: connecting 2025-07-08T11:54:36.242601Z :HTTP DEBUG: (#11,127.0.0.1:12437) outgoing connection opened 2025-07-08T11:54:36.242607Z :HTTP DEBUG: (#11,127.0.0.1:12437) <- (GET /server) 2025-07-08T11:54:36.242986Z :HTTP DEBUG: (#12,[::ffff:127.0.0.1]:43106) incoming connection opened 2025-07-08T11:54:36.243038Z :HTTP DEBUG: (#12,[::ffff:127.0.0.1]:43106) -> (GET /server) 2025-07-08T11:54:36.243075Z :HTTP DEBUG: Updating ownership http://127.0.0.1:14529 with deadline 2025-07-08T11:55:36.243064Z 2025-07-08T11:54:36.243080Z :HTTP DEBUG: SetRefreshTime "/server" to 2025-07-08T11:55:36.243064Z (+1751975736.243064s) 2025-07-08T11:54:36.243091Z :HTTP DEBUG: IncomingForward /server to http://127.0.0.1:14529 timeout 30.000000s 2025-07-08T11:54:36.243147Z :HTTP DEBUG: Connection created [1:16:2063] 2025-07-08T11:54:36.243155Z 
:HTTP DEBUG: resolving 127.0.0.1:14529 2025-07-08T11:54:36.243166Z :HTTP DEBUG: connecting 2025-07-08T11:54:36.243264Z :HTTP DEBUG: (#13,127.0.0.1:14529) outgoing connection opened 2025-07-08T11:54:36.243271Z :HTTP DEBUG: (#13,127.0.0.1:14529) <- (GET /server) 2025-07-08T11:54:36.243325Z :HTTP DEBUG: (#14,[::ffff:127.0.0.1]:33890) incoming connection opened 2025-07-08T11:54:36.243346Z :HTTP DEBUG: (#14,[::ffff:127.0.0.1]:33890) -> (GET /server) 2025-07-08T11:54:36.243395Z :HTTP DEBUG: (#14,[::ffff:127.0.0.1]:33890) <- (200 Found) 2025-07-08T11:54:36.243425Z :HTTP DEBUG: (#14,[::ffff:127.0.0.1]:33890) connection closed 2025-07-08T11:54:36.243510Z :HTTP DEBUG: (#13,127.0.0.1:14529) -> (200 Found) 2025-07-08T11:54:36.243522Z :HTTP DEBUG: (#13,127.0.0.1:14529) connection closed 2025-07-08T11:54:36.243635Z :HTTP DEBUG: Cache received successfull (200) response for /server 2025-07-08T11:54:36.243659Z :HTTP DEBUG: Connection closed [1:16:2063] 2025-07-08T11:54:36.243668Z :HTTP DEBUG: (#12,[::ffff:127.0.0.1]:43106) <- (200 Found) 2025-07-08T11:54:36.243700Z :HTTP DEBUG: (#12,[::ffff:127.0.0.1]:43106) connection closed 2025-07-08T11:54:36.243773Z :HTTP DEBUG: (#11,127.0.0.1:12437) -> (200 Found) 2025-07-08T11:54:36.243781Z :HTTP DEBUG: (#11,127.0.0.1:12437) connection closed 2025-07-08T11:54:36.243870Z :HTTP DEBUG: Connection closed [1:14:2061] 2025-07-08T11:54:36.247170Z :HTTP INFO: Listening on http://[::]:3754 2025-07-08T11:54:36.247223Z :HTTP INFO: Listening on http://[::]:9889 2025-07-08T11:54:36.247265Z :HTTP DEBUG: Connection created [2:14:2061] 2025-07-08T11:54:36.247272Z :HTTP DEBUG: resolving 127.0.0.1:3754 2025-07-08T11:54:36.247282Z :HTTP DEBUG: connecting 2025-07-08T11:54:36.247364Z :HTTP DEBUG: (#11,127.0.0.1:3754) outgoing connection opened 2025-07-08T11:54:36.247371Z :HTTP DEBUG: (#11,127.0.0.1:3754) <- (GET /server) 2025-07-08T11:54:36.247419Z :HTTP DEBUG: (#12,[::ffff:127.0.0.1]:46986) incoming connection opened 2025-07-08T11:54:36.247441Z :HTTP DEBUG: (#12,[::ffff:127.0.0.1]:46986) -> (GET /server) 2025-07-08T11:54:36.247458Z :HTTP DEBUG: Updating ownership http://127.0.0.1:9889 with deadline 2025-07-08T12:04:36.247451Z 2025-07-08T11:54:36.247462Z :HTTP DEBUG: SetRefreshTime "/server" to 2025-07-08T12:04:36.247451Z (+1751976276.247451s) 2025-07-08T11:54:36.247469Z :HTTP DEBUG: IncomingForward /server to http://127.0.0.1:9889 timeout 30.000000s 2025-07-08T11:54:36.247483Z :HTTP DEBUG: Connection created [2:16:2063] 2025-07-08T11:54:36.247488Z :HTTP DEBUG: resolving 127.0.0.1:9889 2025-07-08T11:54:36.247497Z :HTTP DEBUG: connecting 2025-07-08T11:54:36.247556Z :HTTP DEBUG: (#13,127.0.0.1:9889) outgoing connection opened 2025-07-08T11:54:36.247560Z :HTTP DEBUG: (#13,127.0.0.1:9889) <- (GET /server) 2025-07-08T11:54:36.247618Z :HTTP DEBUG: (#14,[::ffff:127.0.0.1]:48226) incoming connection opened 2025-07-08T11:54:36.247640Z :HTTP DEBUG: (#14,[::ffff:127.0.0.1]:48226) -> (GET /server) 2025-07-08T11:54:36.257791Z :HTTP ERROR: (#13,127.0.0.1:9889) connection closed with error: Connection timed out 2025-07-08T11:54:36.257933Z :HTTP DEBUG: (#14,[::ffff:127.0.0.1]:48226) connection closed 2025-07-08T11:54:36.258049Z :HTTP WARN: Cache received failed response with error "Connection timed out" for /server - retrying locally 2025-07-08T11:54:36.258075Z :HTTP DEBUG: Connection closed [2:16:2063] 2025-07-08T11:54:36.268243Z :HTTP DEBUG: (#12,[::ffff:127.0.0.1]:46986) <- (200 Found) 2025-07-08T11:54:36.268306Z :HTTP DEBUG: (#12,[::ffff:127.0.0.1]:46986) connection closed 
2025-07-08T11:54:36.268463Z :HTTP DEBUG: (#11,127.0.0.1:3754) -> (200 Found) 2025-07-08T11:54:36.268481Z :HTTP DEBUG: (#11,127.0.0.1:3754) connection closed 2025-07-08T11:54:36.268673Z :HTTP DEBUG: Connection closed [2:14:2061] >> TYardTest::TestChunkRecommit |82.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |82.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/s3/provider/ut/unittest >> TCollectingS3ListingStrategyTests::IfExceptionIsReturnedFromIteratorThanItShouldCovertItToIssue [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/yt/actors/ut/unittest >> YtLookupActor::Lookup [GOOD] Test command err: 2025-07-08 11:54:36.395 INFO ydb-library-yql-providers-yt-actors-ut(pid=33235, tid=0x00007F2452871B80) [default] storage.cpp:178: FileStorage initialized in "/home/runner/.ya/build/build_root/43nv/000e4d/r3tmp/tmpLkZmt6/", temporary dir: "/home/runner/.ya/build/build_root/43nv/000e4d/r3tmp/tmpLkZmt6/33235", files: 0, total size: 0 2025-07-08 11:54:36.441 INFO ydb-library-yql-providers-yt-actors-ut(pid=33235, tid=0x00007F2452871B80) [YT] yql_yt_lookup_actor.cpp:103: New Yt proivider lookup source actor(ActorId=[1:4:2051]) for cluster=Plato, table=Lookup 2025-07-08 11:54:36.494 DEBUG ydb-library-yql-providers-yt-actors-ut(pid=33235, tid=0x00007F2452871B80) [YT] yql_yt_lookup_actor.cpp:172: ActorId=[1:4:2051] Got LookupRequest for 4 keys >> TPDiskTest::SpaceColor [GOOD] >> TPDiskTest::SmallDisk10Gb >> TPDiskTest::SmallDisk10Gb [GOOD] >> TSectorMapPerformance::TestHDD1960GBWrite100MBOnLastSector [GOOD] >> TPDiskTest::SuprisinglySmallDisk >> TSectorMapPerformance::TestSSD1960GBRead100MBOnFirstSector |82.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/mvp/oidc_proxy/ut/ydb-mvp-oidc_proxy-ut |82.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots >> TYardTest::TestChunkRecommit [GOOD] >> TYardTest::TestChunkRestartRecommit >> ClosedIntervalSet::EnumInRangeReverse [GOOD] >> GivenIdRange::IssueNewRange [GOOD] >> GivenIdRange::Trim |82.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/backpressure/ut_client/ydb-core-blobstorage-backpressure-ut_client |82.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |82.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_info_types.cpp >> CachingDnsResolver::ResolveCaching [GOOD] >> CachingDnsResolver::ResolveCachingV4 >> GivenIdRange::Trim [GOOD] >> GivenIdRange::Subtract >> TPDiskTest::SuprisinglySmallDisk [GOOD] >> TPDiskTest::TestChunkWriteCrossOwner [GOOD] >> CachingDnsResolver::ResolveCachingV4 [GOOD] >> CachingDnsResolver::EventualTimeout |82.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_decimal_types.cpp |82.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_pg_types.cpp |82.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer >> GivenIdRange::Subtract [GOOD] >> GivenIdRange::Points >> CachingDnsResolver::EventualTimeout [GOOD] >> CachingDnsResolver::MultipleRequestsAndHosts [GOOD] >> CachingDnsResolver::DisabledIPv6 >> TSectorMapPerformance::TestSSD1960GBRead100MBOnFirstSector [GOOD] |82.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/cache_ut.cpp |82.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_commit_redo_limit.cpp >> 
TSectorMapPerformance::TestSSD1960GBWrite100MBOnFirstSector >> TYardTest::TestWhiteboard [GOOD] >> TYardTest::TestMultiYardLogLatency >> TYardTest::TestChunkRestartRecommit [GOOD] >> TYardTest::TestChunkDelete >> CachingDnsResolver::DisabledIPv6 [GOOD] >> CachingDnsResolver::DisabledIPv4 [GOOD] >> CachingDnsResolver::PoisonPill [GOOD] >> GivenIdRange::Points [GOOD] >> DnsResolver::ResolveLocalHost [GOOD] >> DnsResolver::ResolveYandexRu [GOOD] >> DnsResolver::GetAddrYandexRu >> GivenIdRange::Runs [GOOD] >> GivenIdRange::Allocate >> TSectorMapPerformance::TestSSD1960GBWrite100MBOnFirstSector [GOOD] >> TSectorMapPerformance::TestSSD1960GBRead1000MBOnFirstSector >> GivenIdRange::Allocate [GOOD] >> DnsResolver::GetAddrYandexRu [GOOD] >> DnsResolver::ResolveTimeout |82.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr/ut_rtmr.cpp |82.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |82.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut >> TYardTest::TestChunkDelete [GOOD] >> TYardTest::TestChunkForget |82.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |82.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_move/ut_move.cpp >> test.py::test[solomon-BadDownsamplingDisabled-] [GOOD] >> test.py::test[solomon-BadDownsamplingFill-] >> TYardTest::TestChunkForget [GOOD] >> TYardTest::TestChunkFlushReboot |82.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |82.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source/ut_external_data_source.cpp |82.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |82.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build >> DnsResolver::ResolveTimeout [GOOD] >> DnsResolver::ResolveGracefulStop [GOOD] >> ActorBenchmark::SendActivateReceiveCSV [GOOD] >> OnDemandDnsResolver::ResolveLocalHost [GOOD] |82.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |82.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/mvp/meta/ut/ydb-mvp-meta-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> TPDiskTest::TestChunkWriteCrossOwner [GOOD] Test command err: 2025-07-08T11:54:29.310127Z node 1 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:29.310332Z node 1 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 17087718580671322780 MagicNextLogChunkReference: 4042033870124357610 MagicLogChunk: 11463440583538715353 MagicDataChunk: 10848502103725034216 MagicSysLogChunk: 6945851041793067723 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975669297969 (2025-07-08T11:54:29.297969Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:29.310342Z node 1 :BS_PDISK ERROR: {BSP01@blobstorage_pdisk_actor.cpp:574} PDiskId# 1Can't start due to a guid error expected# 
17087718580671322779 on-disk# 17087718580671322780 PDiskId# 1 2025-07-08T11:54:29.366914Z node 2 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:29.367138Z node 2 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 9987971740374288911 MagicNextLogChunkReference: 10776349764539842197 MagicLogChunk: 6790227120195444470 MagicDataChunk: 8325420879081935835 MagicSysLogChunk: 1687873761797542235 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975669343917 (2025-07-08T11:54:29.343917Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:29.368819Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:29.370371Z node 2 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:29.370391Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:29.371312Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:29.371584Z node 2 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1710254 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-07-08T11:54:29.372273Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:299} Shutdown OwnerInfo# {{OwnerId: 3 VDiskId: [0:_:0:0:0] ChunkWrites: 0 ChunkReads: 0 LogWrites: 0 LogReader: 0 CurrentFirstLsnToKeep: 0 FirstNonceToKeep: 1710254 StartingPoints: {} Owned chunkIds: {}} PDisk system/log ChunkIds: {0, 1} Free ChunkIds: {2..982} PDiskId# 1 2025-07-08T11:54:29.382431Z node 2 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:29.382645Z node 2 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 9987971740374288911 MagicNextLogChunkReference: 10776349764539842197 MagicLogChunk: 6790227120195444470 MagicDataChunk: 8325420879081935835 MagicSysLogChunk: 1687873761797542235 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975669343917 (2025-07-08T11:54:29.343917Z) FormatFlags: {ErasureEncodeSysLog | 
ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:29.384053Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1057525 NonceLog# 1710254 NonceData# 1743394} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-07-08T11:54:29.385172Z node 2 :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:711} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2025-07-08T11:54:29.385194Z node 2 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 1 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 4096} PDiskId# 1 2025-07-08T11:54:29.385211Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 4096} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:29.385637Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:29.389690Z node 2 :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1817} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 3 PDiskId# 1 2025-07-08T11:54:29.454146Z node 3 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:29.454322Z node 3 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 13693509590388275533 MagicNextLogChunkReference: 4262749929734769619 MagicLogChunk: 4472963360379203572 MagicDataChunk: 17712964901938648363 MagicSysLogChunk: 7056293567638035658 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975669436229 (2025-07-08T11:54:29.436229Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:29.456542Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:29.458115Z node 3 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:29.458136Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 
OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:29.459121Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:29.459285Z node 3 :BS_PDISK ERROR: {BSP01@blobstorage_pdisk_actor.cpp:1183} Actor recieved device error Details# test PDiskId# 1 2025-07-08T11:54:29.489930Z node 4 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:29.489982Z node 4 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:299} Shutdown OwnerInfo# { PDisk system/log ChunkIds: {} Free ChunkIds: {} PDiskId# 1 2025-07-08T11:54:29.517043Z node 5 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:29.517215Z node 5 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 16303865080849699619 MagicNextLogChunkReference: 13843000357717308897 MagicLogChunk: 9085708356842234730 MagicDataChunk: 7056988778210233703 MagicSysLogChunk: 16544105033746886195 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975669501105 (2025-07-08T11:54:29.501105Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:29.518179Z node 5 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:29.518575Z node 5 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:29.518597Z node 5 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:29.519016Z node 5 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:29.519204Z node 5 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1178818 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-07-08T11:54:29.519683Z node 5 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 destination chunk has CommitState# DATA_COMMITTED_DELETE_IN_PROGRESS ownerId# 3 PDiskId# 1 2025-07-08T11:54:29.548326Z node 6 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 
2025-07-08T11:54:29.548511Z node 6 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 9745614552821529440 MagicNextLogChunkReference: 12644651699580066556 MagicLogChunk: 2027047438302231505 ... 30 :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1817} Registered known VDisk VDisk# [a:4294967295:0:0:0] OwnerId# 4 OwnerRound# 95 PDiskId# 1 2025-07-08T11:54:37.513856Z node 30 :BS_PDISK WARN: {LR004@blobstorage_pdisk_logreader.cpp:838} PDiskId# 1 LogReader IsInitial# 0 Owner# 4 VDiskId# [a:_:0:0:0] ChunkIdx# 10 SectorIdx# 5 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags outside the LogEndSector LogEndChunkIdx# 10 LogEndSectorIdx# 5 PDiskId# 1 2025-07-08T11:54:37.513873Z node 30 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 4 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 1} PDiskId# 1 2025-07-08T11:54:37.516365Z node 30 :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1817} Registered known VDisk VDisk# [b:4294967295:0:0:0] OwnerId# 5 OwnerRound# 96 PDiskId# 1 2025-07-08T11:54:37.517142Z node 30 :BS_PDISK WARN: {LR004@blobstorage_pdisk_logreader.cpp:838} PDiskId# 1 LogReader IsInitial# 0 Owner# 5 VDiskId# [b:_:0:0:0] ChunkIdx# 10 SectorIdx# 6 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags outside the LogEndSector LogEndChunkIdx# 10 LogEndSectorIdx# 6 PDiskId# 1 2025-07-08T11:54:37.517156Z node 30 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 5 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 1} PDiskId# 1 2025-07-08T11:54:37.604705Z node 31 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:37.604962Z node 31 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 10737418240 bytes (10 GB) Guid: 17910661467003178995 MagicNextLogChunkReference: 9436727969853508729 MagicLogChunk: 18203123787100698691 MagicDataChunk: 7248019925758859215 MagicSysLogChunk: 18308161988409803547 MagicFormatChunk: 17332287817462050952 ChunkSize: 35651584 bytes (35 MB) SectorSize: 4096 SysLogSectorCount: 48 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975677579696 (2025-07-08T11:54:37.579696Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:37.606009Z node 31 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 46 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:37.608069Z node 31 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:37.608095Z node 31 :BS_PDISK NOTICE: 
{BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:37.609293Z node 31 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:37.609499Z node 31 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [d:_:0:0:0] FirstNonceToKeep# 1842779 CutLogId# [31:7524678237674867562:2050] ownerRound# 97 PDiskId# 1 2025-07-08T11:54:37.611192Z node 31 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2025-07-08T11:54:37.727890Z node 33 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:37.738375Z node 33 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:299} Shutdown OwnerInfo# { PDisk system/log ChunkIds: {} Free ChunkIds: {} PDiskId# 1 2025-07-08T11:54:37.739525Z node 33 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1 2025-07-08T11:54:37.786634Z node 33 :BS_PDISK ERROR: {BSP01@blobstorage_pdisk_actor.cpp:462} Formatting error What# ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl.cpp:1654: Incorrect disk parameters! Total chunks# 0, System chunks needed# 1, cant run with < 3 free chunks! Debug format# {TDiskFormat Version: 3 DiskSize: 16777216 bytes (0 GB) Guid: 2451101899443070249 MagicNextLogChunkReference: 6644229746418799913 MagicLogChunk: 6532950291504583284 MagicDataChunk: 1469951944019456643 MagicSysLogChunk: 7855299312620870764 MagicFormatChunk: 17332287817462050952 ChunkSize: 35651584 bytes (35 MB) SectorSize: 4096 SysLogSectorCount: 32 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975677778190 (2025-07-08T11:54:37.778190Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} 2025-07-08T11:54:37.789057Z node 33 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:37.789339Z node 33 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:378} PDiskId# 1 Can not be formated! Reason# ydb/core/blobstorage/pdisk/blobstorage_pdisk_impl.cpp:1654: Incorrect disk parameters! Total chunks# 0, System chunks needed# 1, cant run with < 3 free chunks! 
Debug format# {TDiskFormat Version: 3 DiskSize: 16777216 bytes (0 GB) Guid: 2451101899443070249 MagicNextLogChunkReference: 6644229746418799913 MagicLogChunk: 6532950291504583284 MagicDataChunk: 1469951944019456643 MagicSysLogChunk: 7855299312620870764 MagicFormatChunk: 17332287817462050952 ChunkSize: 35651584 bytes (35 MB) SectorSize: 4096 SysLogSectorCount: 32 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975677778190 (2025-07-08T11:54:37.778190Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} Switching to StateError. Config: {TPDiskConfg Path# "" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2451101899443070249 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-07-08T11:54:37.866695Z node 34 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:37.866875Z node 34 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 18097654101787949296 MagicNextLogChunkReference: 11795130120576017517 MagicLogChunk: 11302523260305354564 MagicDataChunk: 3859263665517687664 MagicSysLogChunk: 6288196965628884102 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975677849024 (2025-07-08T11:54:37.849024Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:37.868179Z node 34 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:37.868984Z node 34 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { 
ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:37.869000Z node 34 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:37.869248Z node 34 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:37.869351Z node 34 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [e:_:0:0:0] FirstNonceToKeep# 1070431 CutLogId# [34:7524678239128478025:2050] ownerRound# 98 PDiskId# 1 2025-07-08T11:54:37.869616Z node 34 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2025-07-08T11:54:37.869789Z node 34 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 4 vDiskId# [f:_:0:0:0] FirstNonceToKeep# 1070432 CutLogId# [34:7524678239128478025:2050] ownerRound# 99 PDiskId# 1 2025-07-08T11:54:37.870011Z node 34 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 4 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2025-07-08T11:54:37.870642Z node 34 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 3 chunk is owner by another owner. chunk's owner# 4 request's owner# 3 PDiskId# 1 2025-07-08T11:54:37.870672Z node 34 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 chunk is owner by another owner. 
chunk's owner# 3 request's owner# 4 PDiskId# 1 >> CpuCountTest::TestWithoutSharedThread |82.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blob_depot/ut/unittest >> GivenIdRange::Allocate [GOOD] >> TYardTest::TestChunkFlushReboot [GOOD] >> TYardTest::TestChunkDeletionWhileWriting |82.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/stress_tool/device_test_tool.cpp |82.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown >> CpuCountTest::TestWithoutSharedThread [GOOD] >> CpuCountTest::TestWithSharedThread [GOOD] >> HarmonizerTests::TestHarmonizerCreation [GOOD] >> HarmonizerTests::TestAddPool [GOOD] >> HarmonizerTests::TestHarmonize [GOOD] >> HarmonizerTests::TestToNeedyNextToHoggish [GOOD] >> HarmonizerTests::TestToNeedyNextToStarved [GOOD] >> HarmonizerTests::TestExchangeThreads [GOOD] >> HarmonizerTests::TestThreadCounts [GOOD] >> HarmonizerTests::TestSharedHalfThreads [GOOD] >> HarmonizerTests::TestSharedHalfThreadsStarved [GOOD] >> SharedInfoTests::TestInitialization [GOOD] >> SharedInfoTests::TestPull [GOOD] >> SharedInfoTests::TestCpuConsumptionCalculation [GOOD] >> ValueHistoryTests::TestConstructorAndInitialization [GOOD] >> ValueHistoryTests::TestBufferSizePowerOfTwo [GOOD] >> ValueHistoryTests::TestCircularBufferFilling [GOOD] >> ValueHistoryTests::TestBasicRegistration [GOOD] >> ValueHistoryTests::TestRegistrationWithBackwardTime [GOOD] >> ValueHistoryTests::TestRegistrationWithLargeTimeGap [GOOD] |82.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/common_ut.cpp |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown >> ValueHistoryTests::TestRegistrationOnSecondBoundary [GOOD] >> ValueHistoryTests::TestAccumulationWithinSecond [GOOD] >> TYardTest::TestChunkDeletionWhileWriting [GOOD] >> TYardTest::TestChunkPriorityBlock >> ValueHistoryTests::TestTransitionBetweenSeconds [GOOD] >> ValueHistoryTests::TestGetAvgPartForLastSeconds [GOOD] >> ValueHistoryTests::TestGetAvgPartWithTail [GOOD] >> ValueHistoryTests::TestGetAvgPartWithIncompleteData [GOOD] >> ValueHistoryTests::TestGetAvgPartOnBufferBoundary [GOOD] >> ValueHistoryTests::TestGetMaxForLastSeconds [GOOD] >> ValueHistoryTests::TestGetMax [GOOD] >> ValueHistoryTests::TestGetMaxWithNegativeValues [GOOD] >> ValueHistoryTests::TestGetMinForLastSeconds [GOOD] >> ValueHistoryTests::TestGetMin [GOOD] >> ValueHistoryTests::TestGetMinWithNegativeValues [GOOD] >> ValueHistoryTests::TestBufferOverflow [GOOD] >> ValueHistoryTests::TestSmallTimeIntervals [GOOD] >> ValueHistoryTests::TestLargeTimeIntervals [GOOD] >> ValueHistoryTests::TestPrecisionOverLongDuration [GOOD] >> ValueHistoryTests::TestRoundingDuringTimestampConversion [GOOD] >> ValueHistoryTests::TestAverageCalculationPrecision [GOOD] >> TYardTest::TestChunkPriorityBlock [GOOD] |82.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |82.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init |82.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |82.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compression_ut.cpp >> Backpressure::MonteCarlo >> Mvp::OpenIdConnectRequestWithIamTokenYandex [GOOD] >> SchedulerActor::LongEvents [GOOD] >> Mvp::OpenIdConnectRequestWithIamTokenNebius 
[GOOD] >> SchedulerActor::MediumEvents >> Mvp::OpenIdConnectNonAuthorizeRequestWithOptionMethodYandex [GOOD] >> Mvp::OpenIdConnectNonAuthorizeRequestWithOptionMethodNebius [GOOD] >> Mvp::OpenIdConnectSessionServiceCheckValidCookieYandex [GOOD] >> Mvp::OpenIdConnectSessionServiceCheckValidCookieNebius >> Mvp::TokenatorRefreshMetadataTokenGood [GOOD] >> Mvp::OpenIdConnectSessionServiceCheckValidCookieNebius [GOOD] >> Mvp::OpenIdConnectProxyOnHttpsHost [GOOD] >> Mvp::OpenIdConnectFixLocationHeader [GOOD] >> Mvp::OpenIdConnectExchangeNebius [GOOD] >> Mvp::OpenIdConnectSessionServiceCheckAuthorizationFail [GOOD] >> Mvp::OpenIdConnectFullAuthorizationFlow [GOOD] >> Mvp::OpenIdConnectFullAuthorizationFlowAjax [GOOD] >> Mvp::OpenIdConnectWrongStateAuthorizationFlow |82.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |82.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/actors/dnsresolver/ut/unittest >> OnDemandDnsResolver::ResolveLocalHost [GOOD] >> Mvp::OpenIdConnectWrongStateAuthorizationFlow [GOOD] >> Mvp::OpenIdConnectWrongStateAuthorizationFlowAjax [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateAuthorizationFail [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateAccessTokenInvalid [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateAccessTokenInvalidAjax [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateOpenIdScopeMissed >> TYardTest::TestMultiYardLogLatency [GOOD] >> TYardTest::TestMultiYardStartingPoints |82.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |82.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |82.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp >> Mvp::OpenIdConnectSessionServiceCreateOpenIdScopeMissed [GOOD] >> Mvp::OpenIdConnectAllowedHostsList [GOOD] |82.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |82.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_base.cpp |82.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/actors/core/harmonizer/ut/unittest >> ValueHistoryTests::TestAverageCalculationPrecision [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/actors/core/ut/unittest >> ActorBenchmark::SendActivateReceiveCSV [GOOD] Test command err: 1332.186452 ± 438.2060534 ns 32.9% min 1021.171576 ns max 2193.530243 ns 1116.164645 ± 66.80611699 ns 6% min 1017.913606 ns max 1220.171666 ns Lazy 1114.243175 ± 80.73246326 ns 7.3% min 1019.817015 ns max 1250.756655 ns Tail 1080.840699 ± 54.25543654 ns 5.1% min 993.9900483 ns max 1150.82014 ns 1170.051497 ± 65.47557284 ns 5.6% min 1081.412424 ns max 1247.133454 ns Lazy 1031.612133 ± 40.16332646 ns 3.9% min 980.4113249 ns max 1085.294496 ns Tail 6210117.036 ± 2327570.551 ns 37.5% min 4850478.963 ns max 10845545.2 ns 3840 ± 0 0% min 3840 max 3840 3840 ± 0 0% min 3840 max 3840 3840 ± 0 0% min 3840 max 3840 actorPairs: 1 4809755.061 ± 1781324.968 ns 37.1% min 3026683.195 ns max 7320377.771 ns 3840 ± 0 0% min 3840 max 3840 3840 ± 0 0% min 3840 max 3840 3840 ± 0 0% min 3840 max 3840 actorPairs: 1 Lazy 4971944.594 ± 1396273.006 ns 28.1% min 3153003.531 ns max 6309859.19 ns 3840 ± 0 0% min 3840 max 3840 3840 ± 0 0% min 3840 max 3840 3840 ± 0 0% min 3840 max 3840 actorPairs: 1 Tail 4350080.591 ± 1260399.856 ns 29% min 2810295.431 ns max 5840419.781 ns 3532.8 ± 102.4 2.9% min 3328 max 3584 1740.8 ± 102.4 5.9% min 
1536 max 1792 1792 ± 0 0% min 1792 max 1792 actorPairs: 2 3920169.249 ± 960202.1983 ns 24.5% min 2457498.424 ns max 5011655.896 ns 3328 ± 323.8172324 9.8% min 2816 max 3584 1536 ± 323.8172324 21.1% min 1024 max 1792 1792 ± 0 0% min 1792 max 1792 actorPairs: 2 Lazy 2894916.354 ± 514468.7533 ns 17.8% min 1963529.889 ns max 3434133.713 ns 2918.4 ± 716.8 24.6% min 2048 max 3584 1126.4 ± 716.8 63.7% min 256 max 1792 1792 ± 0 0% min 1792 max 1792 actorPairs: 2 Tail 3135175.485 ± 1592773.703 ns 50.9% min 1829351.933 ns max 6137447.635 ns 3430.4 ± 347.2552951 10.2% min 2816 max 3840 921.6 ± 261.0697991 28.4% min 512 max 1280 1280 ± 0 0% min 1280 max 1280 actorPairs: 3 3093077.327 ± 961111.1291 ns 31.1% min 1827381.263 ns max 4529226.705 ns 3072 ± 647.6344648 21.1% min 1792 max 3584 768 ± 396.5934947 51.7% min 0 max 1024 1280 ± 0 0% min 1280 max 1280 actorPairs: 3 Lazy 2167173.654 ± 236851.863 ns 11% min 1876234.028 ns max 2496030.732 ns 3584 ± 161.9086162 4.6% min 3328 max 3840 1075.2 ± 102.4 9.6% min 1024 max 1280 1280 ± 0 0% min 1280 max 1280 actorPairs: 3 Tail 2957719.375 ± 669513.4168 ns 22.7% min 1620701.504 ns max 3353220.977 ns 3072 ± 0 0% min 3072 max 3072 768 ± 0 0% min 768 max 768 768 ± 0 0% min 768 max 768 actorPairs: 4 2390161.458 ± 1034014.658 ns 43.3% min 1536891.904 ns max 3696833.22 ns 3072 ± 0 0% min 3072 max 3072 768 ± 0 0% min 768 max 768 768 ± 0 0% min 768 max 768 actorPairs: 4 Lazy 1490171.793 ± 316835.6318 ns 21.3% min 902621.9027 ns max 1865022.46 ns 3072 ± 0 0% min 3072 max 3072 768 ± 0 0% min 768 max 768 768 ± 0 0% min 768 max 768 actorPairs: 4 Tail 2960335.575 ± 224219.8866 ns 7.6% min 2699978.017 ns max 3269546.698 ns 3686.4 ± 204.8 5.6% min 3328 max 3840 665.6 ± 125.4138748 18.9% min 512 max 768 768 ± 0 0% min 768 max 768 actorPairs: 5 3359676.074 ± 197909.8861 ns 5.9% min 2990849.281 ns max 3587326.727 ns 3430.4 ± 204.8 6% min 3072 max 3584 512 ± 0 0% min 512 max 512 768 ± 0 0% min 768 max 768 actorPairs: 5 Lazy 1748937.98 ± 131665.7103 ns 7.6% min 1571935.946 ns max 1922442.526 ns 3276.8 ± 191.5728582 5.9% min 3072 max 3584 512 ± 0 0% min 512 max 512 768 ± 0 0% min 768 max 768 actorPairs: 5 Tail 1881305.846 ± 445834.3272 ns 23.7% min 1473264.128 ns max 2435145.455 ns 3072 ± 0 0% min 3072 max 3072 512 ± 0 0% min 512 max 512 512 ± 0 0% min 512 max 512 actorPairs: 6 2233418.432 ± 634327.053 ns 28.5% min 1570518.587 ns max 3211923.15 ns 3072 ± 0 0% min 3072 max 3072 512 ± 0 0% min 512 max 512 512 ± 0 0% min 512 max 512 actorPairs: 6 Lazy 1584789.805 ± 123194.5187 ns 7.8% min 1430336.404 ns max 1778219.768 ns 3072 ± 0 0% min 3072 max 3072 512 ± 0 0% min 512 max 512 512 ± 0 0% min 512 max 512 actorPairs: 6 Tail 2395868.382 ± 155447.6283 ns 6.5% min 2268499.562 ns max 2700397.01 ns 3532.8 ± 102.4 2.9% min 3328 max 3584 460.8 ± 102.4 22.3% min 256 max 512 512 ± 0 0% min 512 max 512 actorPairs: 7 2754230.978 ± 239720.4196 ns 8.8% min 2525421.378 ns max 3118375.475 ns 3584 ± 0 0% min 3584 max 3584 512 ± 0 0% min 512 max 512 512 ± 0 0% min 512 max 512 actorPairs: 7 Lazy 1358294.69 ± 32981.21727 ns 2.5% min 1305910.558 ns max 1391231.739 ns 3430.4 ± 204.8 6% min 3072 max 3584 409.6 ± 125.4138748 30.7% min 256 max 512 512 ± 0 0% min 512 max 512 actorPairs: 7 Tail 2341692.604 ± 181100.5362 ns 7.8% min 2192755.073 ns max 2693569.832 ns 2048 ± 0 0% min 2048 max 2048 256 ± 0 0% min 256 max 256 256 ± 0 0% min 256 max 256 actorPairs: 8 2036484.967 ± 740137.7952 ns 36.4% min 1420465.012 ns max 2960552.489 ns 2048 ± 0 0% min 2048 max 2048 256 ± 0 0% min 256 max 256 256 ± 0 0% min 256 max 
256 actorPairs: 8 Lazy 1064944.952 ± 226756.0491 ns 21.3% min 797856.6451 ns max 1339411.944 ns 2048 ± 0 0% min 2048 max 2048 256 ± 0 0% min 256 max 256 256 ± 0 0% min 256 max 256 actorPairs: 8 Tail 8923492.6 ± 2725874.038 ns 30.6% min 6051059.925 ns max 13926629.31 ns 4864 ± 0 0% min 4864 max 4864 4864 ± 0 0% min 4864 max 4864 4864 ± 0 0% min 4864 max 4864 actorPairs: 1 4201963.003 ± 504530.9559 ns 12.1% min 3303796.859 ns max 4864856.031 ns 4864 ± 0 0% min 4864 max 4864 4864 ± 0 0% min 4864 max 4864 4864 ± 0 0% min 4864 max 4864 actorPairs: 1 Lazy 3587825.71 ± 484757.3978 ns 13.6% min 3013220.293 ns max 4376260.228 ns 4864 ± 0 0% min 4864 max 4864 4864 ± 0 0% min 4864 max 4864 4864 ± 0 0% min 4864 max 4864 actorPairs: 1 Tail 6513406.728 ± 1579829.386 ns 24.3% min 3770301.314 ns max 7938119.606 ns 4608 ± 0 0% min 4608 max 4608 2304 ± 0 0% min 2304 max 2304 2304 ± 0 0% min 2304 max 2304 actorPairs: 2 3636210.565 ± 1173270.489 ns 32.3% min 2998992.578 ns max 5982146.625 ns 4608 ± 0 0% min 4608 max 4608 2304 ± 0 0% min 2304 max 2304 2304 ± 0 0% min 2304 max 2304 actorPairs: 2 Lazy 3895416.193 ± 848415.5081 ns 21.8% min 2282490.716 ns max 4514096.853 ns 4608 ± 0 0% min 4608 max 4608 2304 ± 0 0% min 2304 max 2304 2304 ± 0 0% min 2304 max 2304 actorPairs: 2 Tail 4210352.684 ± 1002967.316 ns 23.9% min 2428980.847 ns max 5217901.628 ns 4608 ± 0 0% min 4608 max 4608 1536 ± 0 0% min 1536 max 1536 1536 ± 0 0% min 1536 max 1536 actorPairs: 3 5594863.297 ± 503324.2236 ns 9% min 4601678.391 ns max 5975973.998 ns 4608 ± 0 0% min 4608 max 4608 1536 ± 0 0% min 1536 max 1536 1536 ± 0 0% min 1536 max 1536 actorPairs: 3 Lazy 2278044.179 ± 580761.3104 ns 25.5% min 1616832.335 ns max 3115799.37 ns 4556.8 ± 102.4 2.3% min 4352 max 4608 1484.8 ± 102.4 6.9% min 1280 max 1536 1536 ± 0 0% min 1536 max 1536 actorPairs: 3 Tail 2279939.671 ± 584015.3899 ns 25.7% min 1950278.488 ns max 3447227.741 ns 4096 ± 0 0% min 4096 max 4096 1024 ± 0 0% min 1024 max 1024 1024 ± 0 0% min 1024 max 1024 actorPairs: 4 2868515.793 ± 1054704.93 ns 36.8% min 2330713.988 ns max 4977755.564 ns 4096 ± 0 0% min 4096 max 4096 1024 ± 0 0% min 1024 max 1024 1024 ± 0 0% min 1024 max 1024 actorPairs: 4 Lazy 1735130.862 ± 464754.9895 ns 26.8% min 1192741.386 ns max 2407290.448 ns 4096 ± 0 0% min 4096 max 4096 1024 ± 0 0% min 1024 max 1024 1024 ± 0 0% min 1024 max 1024 actorPairs: 4 Tail 2229094.986 ± 761526.0247 ns 34.2% min 1791875.126 ns max 3749987.177 ns 3840 ± 0 0% min 3840 max 3840 768 ± 0 0% min 768 max 768 768 ± 0 0% min 768 max 768 actorPairs: 5 2055702.707 ± 75078.26576 ns 3.7% min 1911730.141 ns max 2122684.029 ns 3840 ± 0 0% min 3840 max 3840 768 ± 0 0% min 768 max 768 768 ± 0 0% min 768 max 768 actorPairs: 5 Lazy 1233733.122 ± 289308.3079 ns 23.5% min 949679.4127 ns max 1588317.765 ns 3840 ± 0 0% min 3840 max 3840 768 ± 0 0% min 768 max 768 768 ± 0 0% min 768 max 768 actorPairs: 5 Tail 1814674.352 ± 279296.4295 ns 15.4% min 1616269 ns max 2366791.48 ns 4608 ± 0 0% min 4608 max 4608 768 ± 0 0% min 768 max 768 768 ± 0 0% min 768 max 768 actorPairs: 6 2392272.661 ± 603955.7362 ns 25.3% min 1894824.08 ns max 3150215.925 ns 4608 ± 0 0% min 4608 max 4608 768 ± 0 0% min 768 max 768 768 ± 0 0% min 768 max 768 actorPairs: 6 Lazy 1684356.143 ± 126745.8972 ns 7.6% min 1558683.543 ns max 1868599.938 ns 4454.4 ± 204.8 4.6% min 4096 max 4608 665.6 ± 125.4138748 18.9% min 512 max 768 768 ± 0 0% min 768 max 768 actorPairs: 6 Tail 2111502.533 ± 424258.0076 ns 20.1% min 1605923.485 ns max 2567643.43 ns 3584 ± 0 0% min 3584 max 3584 512 ± 0 0% min 512 
max 512 512 ± 0 0% min 512 max 512 actorPairs: 7 2797830.494 ± 848103.0384 ns 30.4% min 1764166.466 ns max 3774023.133 ns 3584 ± 0 0% min 3584 max 3584 512 ± 0 0% min 512 max 512 512 ± 0 0% min 512 max 512 actorPairs: 7 Lazy 1276966.771 ± 304537.5721 ns 23.9% min 947551.37 ns max 1775565.478 ns 3584 ± 0 0% min 3584 max 3584 512 ± 0 0% min 512 max 512 512 ± 0 0% min 512 max 512 actorPairs: 7 Tail 2562482.6 ± 619974.3223 ns 24.2% min 1365112.847 ns max 2981408.911 ns 4096 ± 0 0% min 4096 max 4096 512 ± 0 0% min 512 max 512 512 ± 0 0% min 512 max 512 actorPairs: 8 2624828.338 ± 802712.0991 ns 30.6% min 1713718.923 ns max 3575803.421 ns 4096 ± 0 0% min 4096 max 4096 512 ± 0 0% min 512 max 512 512 ± 0 0% min 512 max 512 actorPairs: 8 Lazy 1073135.962 ± 328732.465 ns 30.7% min 885175.5608 ns max 1729783.992 ns 4096 ± 0 0% min 4096 max 4096 512 ± 0 0% min 512 max 512 512 ± 0 0% min 512 max 512 actorPairs: 8 Tail 2309763.744 ± 669319.7358 ns 29% min 1485063.087 ns max 2913950.057 ns 4556.8 ± 102.4 2.3% min 4352 max 4608 460.8 ± 102.4 22.3% min 256 max 512 512 ± 0 0% min 512 max 512 actorPairs: 9 2764132.641 ± 599772.8197 ns 21.7% min 1651195.765 ns max 3267227.202 ns 4403.2 ± 298.544737 6.8% min 3840 max 4608 409.6 ± 125.4138748 30.7% min 256 max 512 512 ± 0 0% min 512 max 512 actorPairs: 9 Lazy 1396586.228 ± 345397.9398 ns 24.8% min 952616.3723 ns max 1891405.98 ns 3430.4 ± 618.6519538 18.1% min 2560 max 4352 256 ± 0 0% min 256 max 256 512 ± 0 0% min 512 max 512 actorPairs: 9 Tail 1892049.304 ± 393165.2948 ns 20.8% min 1479073.895 ns max 2389332.895 ns 2355.2 ± 409.6 17.4% min 1536 max 2560 204.8 ± 102.4 50.1% min 0 max 256 256 ± 0 0% min 256 max 256 actorPairs: 10 2243754.925 ± 700614.7576 ns 31.3% min 1481607.9 ns max 3215841.435 ns 2560 ± 0 0% min 2560 max 2560 256 ± 0 0% min 256 max 256 256 ± 0 0% min 256 max 256 actorPairs: 10 Lazy 1058931.302 ± 352415.3865 ns 33.3% min 716354.5132 ns max 1550133.281 ns 2560 ± 0 0% min 2560 max 2560 256 ± 0 0% min 256 max 256 256 ± 0 0% min 256 max 256 actorPairs: 10 Tail 10617168.63 ± 2759450.452 ns 26% min 8578921.085 ns max 16073868.53 ns 5888 ± 0 0% min 5888 max 5888 5888 ± 0 0% min 5888 max 5888 5888 ± 0 0% min 5888 max 5888 actorPairs: 1 6364695.734 ± 1927448.476 ns 30.3% min 5336843.479 ns max 10217726.49 ns 5888 ± 0 0% min 5888 max 5888 5888 ± 0 0% min 5888 max 5888 5888 ± 0 0% min 5888 max 5888 actorPairs: 1 Lazy 4685960.511 ± 949819.3382 ns 20.3% min 4164669.831 ns max 6584355.652 ns 5888 ± 0 0% min 5888 max 5888 5888 ± 0 0% min 5888 max 5888 5888 ± 0 0% min 5888 max 5888 actorPairs: 1 Tail 9044613.014 ± 1096303.904 ns 12.2% min 6999468.326 ns max 10297227.88 ns 5632 ± 0 0% min 5632 max 5632 2816 ± 0 0% min 2816 max 2816 2816 ± 0 0% min 2816 max 2816 actorPairs: 2 6288989.737 ± 1773278.927 ns 28.2% min 3706119.225 ns max 7968716.109 ns 5529.6 ± 125.4138748 2.3% min 5376 max 5632 2713.6 ± 125.4138748 4.7% min 2560 max 2816 2816 ± 0 0% min 2816 max 2816 actorPairs: 2 Lazy 3342970.287 ± 657043.2372 ns 19.7% min 2769964.862 ns max 4410221.705 ns 5632 ± 0 0% min 5632 max 5632 2816 ± 0 0% min 2816 max 2816 2816 ± 0 0% min 2816 max 2816 actorPairs: 2 Tail 3465178.478 ± 637035.5648 ns 18.4% min 3026529.832 ns max 4732134.527 ns 5376 ± 0 0 ... 
5.372 ns 5120 ± 0 0% min 5120 max 5120 512 ± 0 0% min 512 max 512 512 ± 0 0% min 512 max 512 actorPairs: 10 Tail 1909346.292 ± 264482.2824 ns 13.9% min 1707675.602 ns max 2414449.413 ns 5632 ± 0 0% min 5632 max 5632 512 ± 0 0% min 512 max 512 512 ± 0 0% min 512 max 512 actorPairs: 11 2001104.528 ± 51855.17039 ns 2.6% min 1933158.92 ns max 2081832.224 ns 5632 ± 0 0% min 5632 max 5632 512 ± 0 0% min 512 max 512 512 ± 0 0% min 512 max 512 actorPairs: 11 Lazy 1037585.119 ± 39876.22742 ns 3.9% min 999731.0178 ns max 1112420.052 ns 5632 ± 0 0% min 5632 max 5632 512 ± 0 0% min 512 max 512 512 ± 0 0% min 512 max 512 actorPairs: 11 Tail 1935223.011 ± 511581.3609 ns 26.5% min 1615272.639 ns max 2956107.957 ns 6144 ± 0 0% min 6144 max 6144 512 ± 0 0% min 512 max 512 512 ± 0 0% min 512 max 512 actorPairs: 12 3116000.447 ± 855905.354 ns 27.5% min 1915264.516 ns max 4151549.741 ns 5632 ± 536.9901303 9.6% min 4864 max 6144 358.4 ± 125.4138748 35% min 256 max 512 512 ± 0 0% min 512 max 512 actorPairs: 12 Lazy 1250430.088 ± 140329.108 ns 11.3% min 1101332.779 ns max 1487757.472 ns 5478.4 ± 618.6519538 11.3% min 4608 max 6144 307.2 ± 102.4 33.4% min 256 max 512 512 ± 0 0% min 512 max 512 actorPairs: 12 Tail 1838123.719 ± 206862.7841 ns 11.3% min 1651394.235 ns max 2233434.47 ns 4403.2 ± 570.1390708 13% min 3584 max 4864 256 ± 0 0% min 256 max 256 512 ± 0 0% min 512 max 512 actorPairs: 13 1980297.422 ± 175992.9602 ns 8.9% min 1782677.331 ns max 2308291.856 ns 5376 ± 667.5663263 12.5% min 4352 max 6144 256 ± 0 0% min 256 max 256 512 ± 0 0% min 512 max 512 actorPairs: 13 Lazy 1192948.076 ± 221420.4296 ns 18.6% min 1012949.341 ns max 1623365.817 ns 4454.4 ± 474.809267 10.7% min 3584 max 4864 256 ± 0 0% min 256 max 256 512 ± 0 0% min 512 max 512 actorPairs: 13 Tail 1903984.988 ± 437701.2126 ns 23% min 1613597.67 ns max 2775912.956 ns 3584 ± 0 0% min 3584 max 3584 256 ± 0 0% min 256 max 256 256 ± 0 0% min 256 max 256 actorPairs: 14 1872080.385 ± 39699.35233 ns 2.2% min 1811903.587 ns max 1913723.866 ns 3584 ± 0 0% min 3584 max 3584 256 ± 0 0% min 256 max 256 256 ± 0 0% min 256 max 256 actorPairs: 14 Lazy 1002457.278 ± 81896.55813 ns 8.2% min 871149.3235 ns max 1121982.712 ns 3584 ± 0 0% min 3584 max 3584 256 ± 0 0% min 256 max 256 256 ± 0 0% min 256 max 256 actorPairs: 14 Tail 12469574.39 ± 2425112.931 ns 19.5% min 9705566.847 ns max 15389707.3 ns 7936 ± 0 0% min 7936 max 7936 7936 ± 0 0% min 7936 max 7936 7936 ± 0 0% min 7936 max 7936 actorPairs: 1 8558657.266 ± 2582917.467 ns 30.2% min 6696077.394 ns max 13634189.33 ns 7936 ± 0 0% min 7936 max 7936 7936 ± 0 0% min 7936 max 7936 7936 ± 0 0% min 7936 max 7936 actorPairs: 1 Lazy 5945295.5 ± 503675.7597 ns 8.5% min 5388860.742 ns max 6878064.641 ns 7936 ± 0 0% min 7936 max 7936 7936 ± 0 0% min 7936 max 7936 7936 ± 0 0% min 7936 max 7936 actorPairs: 1 Tail 9993049.08 ± 1816197.416 ns 18.2% min 7042782.967 ns max 12769771.96 ns 7680 ± 0 0% min 7680 max 7680 3840 ± 0 0% min 3840 max 3840 3840 ± 0 0% min 3840 max 3840 actorPairs: 2 8799569.145 ± 2560865.531 ns 29.2% min 5493416.503 ns max 11766672.96 ns 7577.6 ± 125.4138748 1.7% min 7424 max 7680 3737.6 ± 125.4138748 3.4% min 3584 max 3840 3840 ± 0 0% min 3840 max 3840 actorPairs: 2 Lazy 7542986.477 ± 495414.6192 ns 6.6% min 6572738.123 ns max 7932752.889 ns 7628.8 ± 102.4 1.4% min 7424 max 7680 3788.8 ± 102.4 2.8% min 3584 max 3840 3840 ± 0 0% min 3840 max 3840 actorPairs: 2 Tail 8147076.362 ± 1041892.279 ns 12.8% min 6714744.629 ns max 9084956.213 ns 7628.8 ± 102.4 1.4% min 7424 max 7680 2508.8 ± 102.4 4.1% min 2304 
max 2560 2560 ± 0 0% min 2560 max 2560 actorPairs: 3 5750177.939 ± 1177119.498 ns 20.5% min 5041913.597 ns max 8097575.462 ns 7526.4 ± 125.4138748 1.7% min 7424 max 7680 2406.4 ± 125.4138748 5.3% min 2304 max 2560 2560 ± 0 0% min 2560 max 2560 actorPairs: 3 Lazy 5181088.192 ± 969913.0798 ns 18.8% min 3303660.536 ns max 5940500.939 ns 7526.4 ± 125.4138748 1.7% min 7424 max 7680 2406.4 ± 125.4138748 5.3% min 2304 max 2560 2560 ± 0 0% min 2560 max 2560 actorPairs: 3 Tail 5601685.864 ± 1175770.584 ns 21% min 3406227.585 ns max 6503365.734 ns 7168 ± 0 0% min 7168 max 7168 1792 ± 0 0% min 1792 max 1792 1792 ± 0 0% min 1792 max 1792 actorPairs: 4 10339699.32 ± 271331.2654 ns 2.7% min 9895607.177 ns max 10592401.34 ns 7065.6 ± 204.8 2.9% min 6656 max 7168 1740.8 ± 102.4 5.9% min 1536 max 1792 1792 ± 0 0% min 1792 max 1792 actorPairs: 4 Lazy 3580892.48 ± 795017.1675 ns 22.3% min 2289570.493 ns max 4397089.586 ns 7168 ± 0 0% min 7168 max 7168 1792 ± 0 0% min 1792 max 1792 1792 ± 0 0% min 1792 max 1792 actorPairs: 4 Tail 5329174.506 ± 1261566.437 ns 23.7% min 2809189.811 ns max 6064341.398 ns 7577.6 ± 125.4138748 1.7% min 7424 max 7680 1433.6 ± 125.4138748 8.8% min 1280 max 1536 1536 ± 0 0% min 1536 max 1536 actorPairs: 5 5741166.585 ± 1829447.452 ns 31.9% min 4348288.945 ns max 9134412.408 ns 7219.2 ± 102.4 1.5% min 7168 max 7424 1280 ± 0 0% min 1280 max 1280 1536 ± 0 0% min 1536 max 1536 actorPairs: 5 Lazy 2996200.161 ± 377702.8904 ns 12.7% min 2562508.261 ns max 3477425.299 ns 7116.8 ± 191.5728582 2.7% min 6912 max 7424 1280 ± 0 0% min 1280 max 1280 1536 ± 0 0% min 1536 max 1536 actorPairs: 5 Tail 5339748.964 ± 299637.0864 ns 5.7% min 4761671.52 ns max 5572993.072 ns 7424 ± 280.4339494 3.8% min 6912 max 7680 1126.4 ± 125.4138748 11.2% min 1024 max 1280 1280 ± 0 0% min 1280 max 1280 actorPairs: 6 7369772.419 ± 415830.183 ns 5.7% min 6916901.672 ns max 7865635.845 ns 7116.8 ± 409.6 5.8% min 6400 max 7680 1075.2 ± 102.4 9.6% min 1024 max 1280 1280 ± 0 0% min 1280 max 1280 actorPairs: 6 Lazy 2934881.858 ± 147575.7276 ns 5.1% min 2692578.483 ns max 3098397.133 ns 7116.8 ± 298.544737 4.2% min 6656 max 7424 1024 ± 0 0% min 1024 max 1024 1280 ± 0 0% min 1280 max 1280 actorPairs: 6 Tail 3207894.003 ± 981886.8782 ns 30.7% min 2449820.229 ns max 4955428.657 ns 7168 ± 0 0% min 7168 max 7168 1024 ± 0 0% min 1024 max 1024 1024 ± 0 0% min 1024 max 1024 actorPairs: 7 4026897.96 ± 642934.588 ns 16% min 3448892.686 ns max 4900825.266 ns 7116.8 ± 102.4 1.5% min 6912 max 7168 972.8 ± 102.4 10.6% min 768 max 1024 1024 ± 0 0% min 1024 max 1024 actorPairs: 7 Lazy 2073860.334 ± 413221.9243 ns 20% min 1396973.345 ns max 2608066.218 ns 7065.6 ± 125.4138748 1.8% min 6912 max 7168 921.6 ± 125.4138748 13.7% min 768 max 1024 1024 ± 0 0% min 1024 max 1024 actorPairs: 7 Tail 3098304.714 ± 646775.6861 ns 20.9% min 2373167.589 ns max 4096827.067 ns 6144 ± 0 0% min 6144 max 6144 768 ± 0 0% min 768 max 768 768 ± 0 0% min 768 max 768 actorPairs: 8 3589385.205 ± 618995.1852 ns 17.3% min 3009895.414 ns max 4572988.406 ns 6144 ± 0 0% min 6144 max 6144 768 ± 0 0% min 768 max 768 768 ± 0 0% min 768 max 768 actorPairs: 8 Lazy 1948697.341 ± 319824.0875 ns 16.5% min 1555742.573 ns max 2425263.037 ns 6144 ± 0 0% min 6144 max 6144 768 ± 0 0% min 768 max 768 768 ± 0 0% min 768 max 768 actorPairs: 8 Tail 3017398.795 ± 1080906.108 ns 35.9% min 2075153.398 ns max 4541016.644 ns 6912 ± 0 0% min 6912 max 6912 768 ± 0 0% min 768 max 768 768 ± 0 0% min 768 max 768 actorPairs: 9 2949382.018 ± 232382.8949 ns 7.9% min 2707474.782 ns max 3393082.435 ns 
6912 ± 0 0% min 6912 max 6912 768 ± 0 0% min 768 max 768 768 ± 0 0% min 768 max 768 actorPairs: 9 Lazy 1275324.078 ± 19833.46712 ns 1.6% min 1257861.698 ns max 1307805.047 ns 6912 ± 0 0% min 6912 max 6912 768 ± 0 0% min 768 max 768 768 ± 0 0% min 768 max 768 actorPairs: 9 Tail 2918196.319 ± 785895.3258 ns 27% min 2085633.232 ns max 4153103.423 ns 6604.8 ± 592.6828494 9% min 5888 max 7680 563.2 ± 102.4 18.2% min 512 max 768 768 ± 0 0% min 768 max 768 actorPairs: 10 4543764.956 ± 456196.9453 ns 10.1% min 3730631.31 ns max 5131221.225 ns 7168 ± 428.3699336 6% min 6656 max 7680 563.2 ± 102.4 18.2% min 512 max 768 768 ± 0 0% min 768 max 768 actorPairs: 10 Lazy 2051424.168 ± 140223.7769 ns 6.9% min 1904574.184 ns max 2244642.028 ns 6195.2 ± 376.2416245 6.1% min 5632 max 6656 512 ± 0 0% min 512 max 512 768 ± 0 0% min 768 max 768 actorPairs: 10 Tail 2718176.14 ± 633434.1068 ns 23.4% min 2003947.665 ns max 3487669.574 ns 5632 ± 0 0% min 5632 max 5632 512 ± 0 0% min 512 max 512 512 ± 0 0% min 512 max 512 actorPairs: 11 2832069.428 ± 455475.6358 ns 16.1% min 2432316.752 ns max 3475317.304 ns 5632 ± 0 0% min 5632 max 5632 512 ± 0 0% min 512 max 512 512 ± 0 0% min 512 max 512 actorPairs: 11 Lazy 1778036.533 ± 394599.9217 ns 22.2% min 1102805.268 ns max 2160694.1 ns 5632 ± 0 0% min 5632 max 5632 512 ± 0 0% min 512 max 512 512 ± 0 0% min 512 max 512 actorPairs: 11 Tail 3149232.194 ± 697626.1958 ns 22.2% min 1821659.705 ns max 3778188.003 ns 6144 ± 0 0% min 6144 max 6144 512 ± 0 0% min 512 max 512 512 ± 0 0% min 512 max 512 actorPairs: 12 4059853.453 ± 571633.2894 ns 14.1% min 3015405.471 ns max 4582832.733 ns 6144 ± 0 0% min 6144 max 6144 512 ± 0 0% min 512 max 512 512 ± 0 0% min 512 max 512 actorPairs: 12 Lazy 1622462.276 ± 291252.8115 ns 18% min 1119967.938 ns max 1928886.797 ns 6092.8 ± 102.4 1.7% min 5888 max 6144 460.8 ± 102.4 22.3% min 256 max 512 512 ± 0 0% min 512 max 512 actorPairs: 12 Tail 1845247.801 ± 44501.4169 ns 2.5% min 1794654.713 ns max 1913370.027 ns 6656 ± 0 0% min 6656 max 6656 512 ± 0 0% min 512 max 512 512 ± 0 0% min 512 max 512 actorPairs: 13 2774939.45 ± 562453.0111 ns 20.3% min 2164695.582 ns max 3699102.598 ns 6656 ± 0 0% min 6656 max 6656 512 ± 0 0% min 512 max 512 512 ± 0 0% min 512 max 512 actorPairs: 13 Lazy 1415699.52 ± 307762.5184 ns 21.8% min 1034662.794 ns max 1961939.119 ns 6656 ± 0 0% min 6656 max 6656 512 ± 0 0% min 512 max 512 512 ± 0 0% min 512 max 512 actorPairs: 13 Tail 2957740.024 ± 354628.4492 ns 12% min 2578678.579 ns max 3547618.633 ns 7065.6 ± 125.4138748 1.8% min 6912 max 7168 409.6 ± 125.4138748 30.7% min 256 max 512 512 ± 0 0% min 512 max 512 actorPairs: 14 2588316.618 ± 323317.9896 ns 12.5% min 1951384.107 ns max 2855281.031 ns 7116.8 ± 102.4 1.5% min 6912 max 7168 460.8 ± 102.4 22.3% min 256 max 512 512 ± 0 0% min 512 max 512 actorPairs: 14 Lazy 1848277.18 ± 214005.9221 ns 11.6% min 1474785.734 ns max 2068402.4 ns 6246.4 ± 347.2552951 5.6% min 5632 max 6656 256 ± 0 0% min 256 max 256 512 ± 0 0% min 512 max 512 actorPairs: 14 Tail 2942356.971 ± 369771.1526 ns 12.6% min 2473720.865 ns max 3496075.493 ns 4608 ± 428.3699336 9.3% min 4096 max 5120 256 ± 0 0% min 256 max 256 512 ± 0 0% min 512 max 512 actorPairs: 15 3318813.443 ± 361936.4731 ns 11% min 2646442.155 ns max 3653387.268 ns 5273.6 ± 597.089474 11.4% min 4608 max 6400 256 ± 0 0% min 256 max 256 512 ± 0 0% min 512 max 512 actorPairs: 15 Lazy 989963.4721 ± 33118.63217 ns 3.4% min 939262.7288 ns max 1029046.485 ns 5171.2 ± 921.6 17.9% min 4096 max 6656 256 ± 0 0% min 256 max 256 512 ± 0 0% min 512 max 
512 actorPairs: 15 Tail 2029181.26 ± 305923.8864 ns 15.1% min 1771897.787 ns max 2447329.327 ns 4096 ± 0 0% min 4096 max 4096 256 ± 0 0% min 256 max 256 256 ± 0 0% min 256 max 256 actorPairs: 16 3137985.945 ± 632967.6027 ns 20.2% min 1932222.702 ns max 3745938.583 ns 4096 ± 0 0% min 4096 max 4096 256 ± 0 0% min 256 max 256 256 ± 0 0% min 256 max 256 actorPairs: 16 Lazy 1228011.163 ± 272411.9384 ns 22.2% min 1001960.3 ns max 1749460.618 ns 4096 ± 0 0% min 4096 max 4096 256 ± 0 0% min 256 max 256 256 ± 0 0% min 256 max 256 actorPairs: 16 Tail >> test.py::test[solomon-BadDownsamplingFill-] [GOOD] >> Mvp::OpenIdConnectHandleNullResponseFromProtectedResource [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateNotFoundCookie [GOOD] >> Mvp::OpenIdConnectSessionServiceCreateGetWrongStateAndWrongCookie [GOOD] >> Mvp::OidcImpersonationStartFlow [GOOD] >> Mvp::OidcImpersonationStartNeedServiceAccountId >> Mvp::OidcImpersonationStartNeedServiceAccountId [GOOD] >> Mvp::OidcImpersonationStopFlow [GOOD] >> Mvp::OidcImpersonatedAccessToProtectedResource [GOOD] >> Mvp::OidcImpersonatedAccessNotAuthorized [GOOD] >> test.py::test[solomon-BadDownsamplingInterval-] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> TYardTest::TestChunkPriorityBlock [GOOD] Test command err: 2025-07-08T11:54:29.269949Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:29.270738Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 8256604540494288337 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 1658880 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-07-08T11:54:29.419585Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:29.419757Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 8388608000 bytes (8 GB) Guid: 2026781248763391002 MagicNextLogChunkReference: 1278335754685241423 MagicLogChunk: 5146719963434977475 MagicDataChunk: 10907582310179018116 MagicSysLogChunk: 
8270714529021180203 MagicFormatChunk: 17332287817462050952 ChunkSize: 10485760 bytes (10 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975669390904 (2025-07-08T11:54:29.390904Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:29.421080Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:29.422406Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:29.422633Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:29.423094Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:29.518935Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1239408 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-07-08T11:54:29.551834Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:29.552016Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 8388608000 bytes (8 GB) Guid: 15194719720872324422 MagicNextLogChunkReference: 2134703158674536682 MagicLogChunk: 14020970959801922230 MagicDataChunk: 8190412605595958485 MagicSysLogChunk: 15708311238075946074 MagicFormatChunk: 17332287817462050952 ChunkSize: 10485760 bytes (10 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975669534173 (2025-07-08T11:54:29.534173Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:29.552937Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:29.553459Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:29.553477Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | 
DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:29.553720Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:29.651271Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1571403 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-07-08T11:54:32.753366Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:32.753911Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 8388608000 bytes (8 GB) Guid: 6870294198032277311 MagicNextLogChunkReference: 73704218046166693 MagicLogChunk: 2508421730275251030 MagicDataChunk: 16122092107756458993 MagicSysLogChunk: 7683137865422118925 MagicFormatChunk: 17332287817462050952 ChunkSize: 10485760 bytes (10 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975672701614 (2025-07-08T11:54:32.701614Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:32.755742Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:32.756561Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:32.756611Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:32.756982Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:32.857151Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1209580 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-07-08T11:54:33.694083Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:33.695194Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 8388608000 bytes (8 GB) Guid: 16389755575869226475 MagicNextLogChunkReference: 8852536554015486008 MagicLogChunk: 17812186185027478923 MagicDataChunk: 5234484930009961199 MagicSysLogChunk: 13411964659425803483 MagicFormatChunk: 17332287817462050952 ChunkSize: 10485760 bytes (10 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975673645171 (2025-07-08T11:54:33.645171Z) FormatFlags: 
{ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:33.696215Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:33.696689Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:33.696712Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:33.696924Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:33.793831Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1108096 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-07-08T11:54:35.368050Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:35.368264Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 2097152000 bytes (2 GB) Guid: 18278021149129573582 MagicNextLogChunkReference: 2412408148160062653 MagicLogChunk: 13365242643054021959 MagicDataChunk: 8104150553388186382 MagicSysLogChunk: 14164336638428399667 MagicFormatChunk: 17332287817462050952 ChunkSize: 4194304 bytes (4 MB) SectorSize: 4096 SysLogSectorCount: 48 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975675303066 (2025-07-08T11:54:35.303066Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:35.369253Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 46 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# ... 
2 PDiskId# 1 2025-07-08T11:54:38.473363Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:1278} PDiskId# 1 Can't reserve 1000 chunks for ownerId# 3 sharedFree# 752 ownerFree# 511 estimatedColor after allocation# BLACK occupancy after allocation# 1.949514563 Marker# BPD20 PDiskId# 1 2025-07-08T11:54:38.568239Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:38.573161Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 12517250206743715826 MagicNextLogChunkReference: 6159678672696122030 MagicLogChunk: 17031654580460239247 MagicDataChunk: 3466296775385129824 MagicSysLogChunk: 5886896536699923070 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975678532963 (2025-07-08T11:54:38.532963Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:38.578015Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:38.581282Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:38.581306Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:38.582056Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:38.796837Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:38.825096Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 6494988812534324848 MagicNextLogChunkReference: 10045828241328920160 MagicLogChunk: 16490251150602795867 MagicDataChunk: 13138544746575016007 MagicSysLogChunk: 13445689878499766914 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975678715146 (2025-07-08T11:54:38.715146Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:38.826366Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:38.827150Z :BS_PDISK NOTICE: 
{LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:38.827169Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:38.827582Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:38.895589Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1715868 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-07-08T11:54:38.916035Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:38.916257Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 6494988812534324848 MagicNextLogChunkReference: 10045828241328920160 MagicLogChunk: 16490251150602795867 MagicDataChunk: 13138544746575016007 MagicSysLogChunk: 13445689878499766914 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975678715146 (2025-07-08T11:54:38.715146Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:38.917320Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1692348 NonceLog# 1715868 NonceData# 1920001} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-07-08T11:54:38.917772Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:711} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2025-07-08T11:54:38.917794Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 2 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 8192} PDiskId# 1 2025-07-08T11:54:38.917807Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 8192} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:38.918069Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:39.015427Z :BS_PDISK NOTICE: 
{BPD30@blobstorage_pdisk_impl.cpp:1817} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2025-07-08T11:54:39.015500Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:922} SendChunkReadErrorPDiskId# 1 chunk owned by the system for ownerId# 3 can't read chunkIdx# 2 ReqId# 2560002094 PDiskId# 1 2025-07-08T11:54:39.077810Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:39.078117Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 16777216000 bytes (16 GB) Guid: 9262761968724184381 MagicNextLogChunkReference: 10800621445828399235 MagicLogChunk: 4101030111389228274 MagicDataChunk: 9381925770039956045 MagicSysLogChunk: 17727452893732086914 MagicFormatChunk: 17332287817462050952 ChunkSize: 18874368 bytes (18 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975679057342 (2025-07-08T11:54:39.057342Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:39.079141Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:39.079624Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:39.079656Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:39.079918Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:39.177425Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1892044 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-07-08T11:54:39.229515Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:39.229679Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 7075045556058408750 MagicNextLogChunkReference: 13920274853641717450 MagicLogChunk: 14204770946125235580 MagicDataChunk: 8325303601204136027 MagicSysLogChunk: 11088864211955864701 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975679210778 (2025-07-08T11:54:39.210778Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:39.230806Z :BS_PDISK NOTICE: 
{BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:39.231470Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:39.231492Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:39.231747Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:39.328718Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1184684 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/mvp/core/ut/unittest >> Mvp::TokenatorRefreshMetadataTokenGood [GOOD] Test command err: 2025-07-08T11:54:34.407426Z :MVP DEBUG: Refreshing token metadataTokenName 2025-07-08T11:54:34.407550Z :MVP DEBUG: Updating metadata token 2025-07-08T11:54:34.440213Z :MVP DEBUG: Refreshing token metadataTokenName 2025-07-08T11:54:34.440307Z :MVP DEBUG: Updating metadata token 2025-07-08T11:54:39.440441Z :MVP DEBUG: Refreshing token metadataTokenName 2025-07-08T11:54:39.440547Z :MVP DEBUG: Updating metadata token >> TYardTest::TestMultiYardStartingPoints [GOOD] >> TYardTest::TestMultiYardLogMultipleWriteRead >> TYardTest::TestChunkWriteReadMultipleWithHddSectorMap [GOOD] >> TYardTest::TestChunkWriteReadWhole >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSize [GOOD] |82.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |82.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_group/ydb-core-blobstorage-ut_group |82.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut >> WaitingBenchs::WakingUpTest [GOOD] >> TYardTest::TestChunkWriteReadWhole [GOOD] >> TYardTest::TestChunkWriteReadWholeWithHddSectorMap >> TYardTest::TestMultiYardLogMultipleWriteRead [GOOD] >> TYardTest::TestSysLogOverwrite |82.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |82.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/query/ut/unittest >> TQueryResultSizeTrackerTest::SerializeDeserializeMaxPtotobufSize [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/mvp/oidc_proxy/ut/unittest >> Mvp::OidcImpersonatedAccessNotAuthorized [GOOD] Test command err: 2025-07-08T11:54:39.359655Z :MVP DEBUG: Forward user request bypass OIDC 2025-07-08T11:54:39.359823Z :MVP DEBUG: Incoming response for protected resource: 200 2025-07-08T11:54:39.380439Z :MVP DEBUG: Forward user request bypass OIDC 2025-07-08T11:54:39.380481Z :MVP DEBUG: Incoming response for protected resource: 200 2025-07-08T11:54:39.390583Z :MVP DEBUG: Forward user 
request bypass OIDC 2025-07-08T11:54:39.390711Z :MVP DEBUG: Incoming response for protected resource: 204 2025-07-08T11:54:39.391998Z :MVP DEBUG: Forward user request bypass OIDC 2025-07-08T11:54:39.392082Z :MVP DEBUG: Incoming response for protected resource: 204 2025-07-08T11:54:39.407468Z :MVP DEBUG: Forward user request bypass OIDC 2025-07-08T11:54:39.407573Z :MVP DEBUG: Incoming response for protected resource: 204 2025-07-08T11:54:39.409335Z :MVP DEBUG: Forward user request bypass OIDC 2025-07-08T11:54:39.409432Z :MVP DEBUG: Incoming response for protected resource: 204 2025-07-08T11:54:39.485782Z :MVP DEBUG: SessionService.Check(): OK 2025-07-08T11:54:39.485808Z :MVP DEBUG: Forward user request bypass OIDC 2025-07-08T11:54:39.485927Z :MVP DEBUG: Incoming response for protected resource: 400 2025-07-08T11:54:39.485935Z :MVP DEBUG: Try to send request to HTTPS port 2025-07-08T11:54:39.485939Z :MVP DEBUG: Forward user request bypass OIDC 2025-07-08T11:54:39.486004Z :MVP DEBUG: Incoming response for protected resource: 200 2025-07-08T11:54:39.487395Z :MVP DEBUG: SessionService.Check(): OK 2025-07-08T11:54:39.487412Z :MVP DEBUG: Forward user request bypass OIDC 2025-07-08T11:54:39.487453Z :MVP DEBUG: Incoming response for protected resource: 400 2025-07-08T11:54:39.514718Z :MVP DEBUG: SessionService.Check(): OK 2025-07-08T11:54:39.514744Z :MVP DEBUG: Forward user request bypass OIDC 2025-07-08T11:54:39.514786Z :MVP DEBUG: Incoming response for protected resource: 307 2025-07-08T11:54:39.519936Z :MVP DEBUG: SessionService.Check(): OK 2025-07-08T11:54:39.519963Z :MVP DEBUG: Forward user request bypass OIDC 2025-07-08T11:54:39.519997Z :MVP DEBUG: Incoming response for protected resource: 302 2025-07-08T11:54:39.522123Z :MVP DEBUG: SessionService.Check(): OK 2025-07-08T11:54:39.522141Z :MVP DEBUG: Forward user request bypass OIDC 2025-07-08T11:54:39.522168Z :MVP DEBUG: Incoming response for protected resource: 302 2025-07-08T11:54:39.523986Z :MVP DEBUG: SessionService.Check(): OK 2025-07-08T11:54:39.524001Z :MVP DEBUG: Forward user request bypass OIDC 2025-07-08T11:54:39.524025Z :MVP DEBUG: Incoming response for protected resource: 302 2025-07-08T11:54:39.525419Z :MVP DEBUG: SessionService.Check(): OK 2025-07-08T11:54:39.525436Z :MVP DEBUG: Forward user request bypass OIDC 2025-07-08T11:54:39.525458Z :MVP DEBUG: Incoming response for protected resource: 302 2025-07-08T11:54:39.530896Z :MVP DEBUG: Start OIDC process 2025-07-08T11:54:39.531206Z :MVP DEBUG: Using cookie (__Host_session_cookie_79632E6F617574682E7964622D766965776572: c2Vz****aWU= (CE0CB168)) 2025-07-08T11:54:39.531216Z :MVP DEBUG: Exchange session token 2025-07-08T11:54:39.531239Z :MVP DEBUG: Getting access token: 200 OK 2025-07-08T11:54:39.531250Z :MVP DEBUG: Forward user request bypass OIDC 2025-07-08T11:54:39.531259Z :MVP DEBUG: Incoming response for protected resource: 200 2025-07-08T11:54:39.542540Z :MVP DEBUG: SessionService.Check(): 401 2025-07-08T11:54:39.548983Z :MVP DEBUG: SessionService.Check(): 400 2025-07-08T11:54:39.549161Z :MVP DEBUG: Restore oidc session 2025-07-08T11:54:39.549233Z :MVP DEBUG: Incoming response from authorization server: 200 2025-07-08T11:54:39.550892Z :MVP DEBUG: SessionService.Create(): OK 2025-07-08T11:54:39.552413Z :MVP DEBUG: SessionService.Check(): OK 2025-07-08T11:54:39.552432Z :MVP DEBUG: Forward user request bypass OIDC 2025-07-08T11:54:39.552464Z :MVP DEBUG: Incoming response for protected resource: 200 2025-07-08T11:54:39.559106Z :MVP DEBUG: SessionService.Check(): 400 
2025-07-08T11:54:39.559261Z :MVP DEBUG: Restore oidc session 2025-07-08T11:54:39.559322Z :MVP DEBUG: Incoming response from authorization server: 200 2025-07-08T11:54:39.560688Z :MVP DEBUG: SessionService.Create(): OK 2025-07-08T11:54:39.561750Z :MVP DEBUG: SessionService.Check(): OK 2025-07-08T11:54:39.561765Z :MVP DEBUG: Forward user request bypass OIDC 2025-07-08T11:54:39.561791Z :MVP DEBUG: Incoming response for protected resource: 200 2025-07-08T11:54:39.595009Z :MVP DEBUG: Restore oidc session 2025-07-08T11:54:39.595063Z :MVP DEBUG: Check state failed: Calculated digest is not equal expected digest 2025-07-08T11:54:39.614180Z :MVP DEBUG: Restore oidc session 2025-07-08T11:54:39.614234Z :MVP DEBUG: Check state failed: Calculated digest is not equal expected digest 2025-07-08T11:54:39.634145Z :MVP DEBUG: Restore oidc session 2025-07-08T11:54:39.634286Z :MVP DEBUG: Incoming response from authorization server: 200 2025-07-08T11:54:39.638504Z :MVP DEBUG: SessionService.Create(): 401 2025-07-08T11:54:39.669093Z :MVP DEBUG: Restore oidc session 2025-07-08T11:54:39.669202Z :MVP DEBUG: Incoming response from authorization server: 200 2025-07-08T11:54:39.672001Z :MVP DEBUG: SessionService.Create(): 400 2025-07-08T11:54:39.684548Z :MVP DEBUG: Restore oidc session 2025-07-08T11:54:39.684699Z :MVP DEBUG: Incoming response from authorization server: 200 2025-07-08T11:54:39.688093Z :MVP DEBUG: SessionService.Create(): 400 2025-07-08T11:54:39.692546Z :MVP DEBUG: Restore oidc session 2025-07-08T11:54:39.692669Z :MVP DEBUG: Incoming response from authorization server: 200 2025-07-08T11:54:39.694035Z :MVP DEBUG: SessionService.Create(): 412 2025-07-08T11:54:39.723865Z :MVP DEBUG: SessionService.Check(): 400 2025-07-08T11:54:39.725230Z :MVP DEBUG: SessionService.Check(): 400 2025-07-08T11:54:39.726236Z :MVP DEBUG: SessionService.Check(): 400 2025-07-08T11:54:39.743688Z :MVP DEBUG: Forward user request bypass OIDC 2025-07-08T11:54:39.743830Z :MVP DEBUG: Can not process request to protected resource: GET /counters HTTP/1.1 Host: ydb.viewer.page Accept: */* Accept-Encoding: deflate Authorization: 2025-07-08T11:54:39.747218Z :MVP DEBUG: Restore oidc session 2025-07-08T11:54:39.747258Z :MVP DEBUG: Restore oidc context failed: Cannot find cookie ydb_oidc_cookie 2025-07-08T11:54:39.753520Z :MVP DEBUG: Restore oidc session 2025-07-08T11:54:39.753559Z :MVP DEBUG: Check state failed: Calculated digest is not equal expected digest 2025-07-08T11:54:39.791034Z :MVP DEBUG: Start impersonation process 2025-07-08T11:54:39.791069Z :MVP DEBUG: Using cookie (__Host_session_cookie_636C69656E745F6964: c2Vz****aWU= (CE0CB168)) 2025-07-08T11:54:39.791076Z :MVP DEBUG: Request impersonated token 2025-07-08T11:54:39.791112Z :MVP DEBUG: Incoming response from authorization server: 200 2025-07-08T11:54:39.791134Z :MVP DEBUG: Set impersonated cookie: (__Host_impersonated_cookie_636C69656E745F6964: aW1w****bg== (B126DD61)) 2025-07-08T11:54:39.808471Z :MVP DEBUG: Start impersonation process 2025-07-08T11:54:39.808499Z :MVP DEBUG: Using cookie (__Host_session_cookie_636C69656E745F6964: c2Vz****aWU= (CE0CB168)) 2025-07-08T11:54:39.836009Z :MVP DEBUG: Clear cookie: (__Host_impersonated_cookie_636C69656E745F6964) 2025-07-08T11:54:39.859101Z :MVP DEBUG: Start OIDC process 2025-07-08T11:54:39.859127Z :MVP DEBUG: Using cookie (__Host_session_cookie_636C69656E745F6964: c2Vz****aWU= (CE0CB168)) 2025-07-08T11:54:39.859132Z :MVP DEBUG: Using cookie (__Host_impersonated_cookie_636C69656E745F6964: aW1w****ZQ== (1A20D8C0)) 
2025-07-08T11:54:39.859136Z :MVP DEBUG: Exchange impersonated token 2025-07-08T11:54:39.859243Z :MVP DEBUG: Getting access token: 200 OK 2025-07-08T11:54:39.859258Z :MVP DEBUG: Forward user request bypass OIDC 2025-07-08T11:54:39.859276Z :MVP DEBUG: Incoming response for protected resource: 200 2025-07-08T11:54:39.874146Z :MVP DEBUG: Start OIDC process 2025-07-08T11:54:39.874172Z :MVP DEBUG: Using cookie (__Host_session_cookie_636C69656E745F6964: c2Vz****aWU= (CE0CB168)) 2025-07-08T11:54:39.874177Z :MVP DEBUG: Using cookie (__Host_impersonated_cookie_636C69656E745F6964: aW1w****ZQ== (1A20D8C0)) 2025-07-08T11:54:39.874182Z :MVP DEBUG: Exchange impersonated token 2025-07-08T11:54:39.874283Z :MVP DEBUG: Getting access token: 401 OK 2025-07-08T11:54:39.874288Z :MVP DEBUG: Getting access token: {"error": "bad_token"} 2025-07-08T11:54:39.874292Z :MVP DEBUG: Clear impersonated cookie (__Host_impersonated_cookie_636C69656E745F6964) and retry >> PushdownTest::NoFilter [GOOD] |82.5%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/test-results/unittest/{meta.json ... results_accumulator.log} |82.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/static_validator/ut/example_configs/static_validator-ut-example_configs >> PushdownTest::Equal [GOOD] >> PushdownTest::NotEqualInt32Int64 [GOOD] >> PushdownTest::TrueCoalesce [GOOD] >> PushdownTest::CmpInt16AndInt32 [GOOD] >> PushdownTest::PartialAnd [GOOD] >> PushdownTest::PartialAndOneBranchPushdownable [GOOD] >> PushdownTest::NotNull [GOOD] >> PushdownTest::NotNullForDatetime [GOOD] >> PushdownTest::IsNull >> test.py::test[solomon-BadDownsamplingInterval-] [GOOD] >> test.py::test[solomon-Basic-default.txt] |82.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/config/validation/column_shard_config_validator_ut/column_shard_config_validator_ut |82.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut >> TYardTest::TestChunkWriteReadWholeWithHddSectorMap [GOOD] >> PushdownTest::IsNull [GOOD] >> TYardTest::TestSysLogOverwrite [GOOD] >> PushdownTest::StringFieldsNotSupported [GOOD] >> PushdownTest::StringFieldsNotSupported2 [GOOD] >> TYardTest::TestHttpInfo >> TYardTest::TestUpsAndDownsAtTheBoundary ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/actors/core/ut/unittest >> WaitingBenchs::WakingUpTest [GOOD] Test command err: Actor [1:7524678195816235711:2048] registered alias [1:7524678195816235712:2048] Actor [1:7524678195816235714:2050] sending TEvPing to [1:7524678195816235712:2048] Actor [1:7524678195816235711:2048] received TEvPing from [1:7524678195816235714:2050] Actor [1:7524678195816235713:2049] sending TEvPing to [1:7524678195816235712:2048] Actor [1:7524678195816235714:2050] received TEvPong from [1:7524678195816235711:2048] Actor [1:7524678195816235711:2048] received TEvGone from [1:7524678195816235714:2050] Actor [1:7524678195816235713:2049] received TEvUndelivered from [1:7524678195816235712:2048] Actor [1:7524678195816235711:2048] received TEvGone from [1:7524678195816235713:2049] Actor [1:3:2050] registered alias [1:4:2050] Actor [1:5:2051] sending TEvPing to [1:4:2050] Actor [1:6:2052] sending TEvPing to [1:4:2050] Actor [1:3:2050] received TEvPing from [1:5:2051] Actor [1:5:2051] received TEvPong from [1:3:2050] Actor [1:6:2052] received TEvUndelivered from [1:4:2050] Actor [1:3:2050] received TEvGone from [1:5:2051] Actor [1:3:2050] received TEvGone from [1:6:2052] Throw yexception Handle yexception Throw std::exception Handle std::exception from event with type 65538 Throw trash 
Handle trash 0.01921101592 38.33142 AvgAwakeningCycles: 211798.9644 AvgAwakeningUs: 106.1498185 AvgSleep20usCycles:40282.64472 AvgSleep20usUs:20.18893453 AvgWakingUpCycles: 5905.44188 AvgWakingUpUs: 2.959700892 AwakeningHist: [0us - 1us] 2 [1us - 2us] 1 [2us - 3us] 12 [3us - 4us] 53 [4us - 5us] 10894 [5us - 6us] 14943 [6us - 7us] 38360 [7us - 8us] 10643 [8us - 9us] 7151 [9us - 10us] 3076 [10us - 11us] 1525 [11us - 12us] 1139 [12us - 13us] 934 [13us - 14us] 819 [14us - 15us] 699 [15us - 16us] 591 [16us - 17us] 544 [17us - 18us] 473 [18us - 19us] 396 [19us - 20us] 365 [20us - 21us] 340 [21us - 22us] 251 [22us - 23us] 205 [23us - 24us] 186 [24us - 25us] 167 [25us - 26us] 121 [26us - 27us] 120 [27us - 28us] 94 [28us - 29us] 96 [29us - 30us] 80 [30us - 31us] 80 [31us - 32us] 71 [32us - 33us] 56 [33us - 34us] 65 [34us - 35us] 69 [35us - 36us] 45 [36us - 37us] 46 [37us - 38us] 48 [38us - 39us] 36 [39us - 40us] 31 [40us - 41us] 34 [41us - 42us] 30 [42us - 43us] 40 [43us - 44us] 22 [44us - 45us] 26 [45us - 46us] 31 [46us - 47us] 21 [47us - 48us] 23 [48us - 49us] 17 [49us - 50us] 26 [50us - 51us] 16 [51us - 52us] 24 [52us - 53us] 18 [53us - 54us] 25 [54us - 55us] 22 [55us - 56us] 19 [56us - 57us] 14 [57us - 58us] 21 [58us - 59us] 13 [59us - 60us] 13 [60us - 61us] 13 [61us - 62us] 20 [62us - 63us] 15 [63us - 64us] 16 [64us - 65us] 15 [65us - 66us] 7 [66us - 67us] 17 [67us - 68us] 18 [68us - 69us] 16 [69us - 70us] 14 [70us - 71us] 12 [71us - 72us] 11 [72us - 73us] 10 [73us - 74us] 11 [74us - 75us] 16 [75us - 76us] 9 [76us - 77us] 10 [77us - 78us] 5 [78us - 79us] 15 [79us - 80us] 5 [80us - 81us] 8 [81us - 82us] 13 [82us - 83us] 5 [83us - 84us] 12 [84us - 85us] 9 [85us - 86us] 12 [86us - 87us] 9 [87us - 88us] 16 [88us - 89us] 4 [89us - 90us] 9 [90us - 91us] 12 [91us - 92us] 12 [92us - 93us] 13 [93us - 94us] 13 [94us - 95us] 13 [95us - 96us] 4 [96us - 97us] 6 [97us - 98us] 10 [98us - 99us] 11 [99us - 100us] 9 [100us - 101us] 9 [101us - 102us] 4 [102us - 103us] 8 [103us - 104us] 7 [104us - 105us] 6 [105us - 106us] 7 [106us - 107us] 11 [107us - 108us] 14 [108us - 109us] 6 [109us - 110us] 11 [110us - 111us] 9 [111us - 112us] 3 [112us - 113us] 3 [113us - 114us] 6 [114us - 115us] 8 [115us - 116us] 12 [116us - 117us] 5 [117us - 118us] 7 [118us - 119us] 13 [119us - 120us] 7 [120us - 121us] 9 [121us - 122us] 3 [122us - 123us] 8 [123us - 124us] 7 [124us - 125us] 4 [125us - 126us] 5 [126us - 127us] 5 [127us - ...] 4066 WakingUpHist: [0us - 1us] 95 [1us - 2us] 2406 [2us - 3us] 67379 [3us - 4us] 22489 [4us - 5us] 4546 [5us - 6us] 1518 [6us - 7us] 625 [7us - 8us] 326 [8us - 9us] 192 [9us - 10us] 114 [10us - 11us] 72 [11us - 12us] 45 [12us - 13us] 32 [13us - 14us] 20 [14us - 15us] 27 [15us - 16us] 9 [16us - 17us] 15 [17us - 18us] 13 [18us - 19us] 9 [19us - 20us] 4 [20us - 21us] 2 [22us - 23us] 2 [23us - 24us] 2 [24us - 25us] 2 [25us - 26us] 2 [26us - 27us] 6 [27us - 28us] 2 [28us - 29us] 4 [29us - 30us] 1 [30us - 31us] 2 [31us - 32us] 2 [32us - 33us] 4 [33us - 34us] 1 [34us - 35us] 1 [35us - 36us] 1 [36us - 37us] 1 [38us - 39us] 1 [40us - 41us] 1 [42us - 43us] 1 [44us - 45us] 2 [48us - 49us] 1 [50us - 51us] 2 [52us - 53us] 2 [54us - 55us] 1 [56us - 57us] 1 [57us - 58us] 1 [59us - 60us] 1 [60us - 61us] 2 [61us - 62us] 1 [66us - 67us] 1 [67us - 68us] 2 [85us - 86us] 1 [87us - 88us] 1 [92us - 93us] 1 [97us - 98us] 1 [127us - ...] 
5 |82.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/workload/tpch/ut/ydb-library-workload-tpch-ut |82.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/ut_utils.cpp |82.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/config/validation/auth_config_validator_ut/core-config-validation-auth_config_validator_ut >> TYardTest::TestHttpInfo [GOOD] >> TYardTest::TestHttpInfoFileDoesntExist |82.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/fq_ut.cpp >> TBsOther1::ChaoticParallelWrite [GOOD] >> TBsOther2::ChaoticParallelWrite_SkeletonFrontQueuesOverload >> GroupStress::Test [GOOD] >> TYardTest::TestHttpInfoFileDoesntExist [GOOD] >> TYardTest::TestFirstRecordToKeep |82.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |82.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |82.5%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TYardTest::TestFirstRecordToKeep [GOOD] >> TYardTest::TestHugeChunkAndLotsOfTinyAsyncLogOrder |82.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ydb_stress_tool |82.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/test_connection/ut/unittest |82.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/cms/cms_ut.cpp >> StaticConfigExamples::MIRROR_3_DC_NODES [GOOD] >> StaticConfigExamples::MIRROR_3_DC_NODES_IN_MEMORY [GOOD] >> StaticConfigExamples::SingleNodeWithFile [GOOD] >> StaticConfigExamples::SINGLE_NODE_IN_MEMORY [GOOD] >> StaticConfigExamples::BLOCK42 [GOOD] >> StaticConfigExamples::MIRROR_3_DC_9_NODES [GOOD] |82.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/config/validation/ut/ydb-core-config-validation-ut |82.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut >> test.py::test[solomon-Basic-default.txt] [GOOD] >> test.py::test[solomon-BasicExtractMembers-default.txt] >> TpchQueries::ScaleFactor [GOOD] >> TBsOther2::ChaoticParallelWrite_SkeletonFrontQueuesOverload [GOOD] >> AuthConfigValidation::AcceptValidPasswordComplexity [GOOD] >> AuthConfigValidation::CannotAcceptInvalidPasswordComplexity [GOOD] >> AuthConfigValidation::AcceptValidAccountLockoutConfig [GOOD] >> AuthConfigValidation::CannotAcceptInvalidAccountLockoutConfig [GOOD] |82.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/actors/cppcoro/corobenchmark/corobenchmark |82.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_group/unittest >> GroupStress::Test [GOOD] >> TSectorMapPerformance::TestSSD1960GBRead1000MBOnFirstSector [GOOD] >> TSectorMapPerformance::TestSSD1960GBWrite1000MBOnFirstSector >> TYardTest::TestHugeChunkAndLotsOfTinyAsyncLogOrder [GOOD] >> TYardTest::TestDamagedFirstRecordToKeep ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/generic/provider/ut/pushdown/unittest >> PushdownTest::StringFieldsNotSupported2 [GOOD] Test command err: Initial program: ( (let $data_source (DataSource '"generic" '"test_cluster")) (let $empty_lambda (lambda '($arg) (Bool '"true"))) (let $table (MrTableConcat (Key '('table (String '"test_table")))) ) (let $read (Read! world $data_source $table)) (let $map_lambda (lambda '($row) (OptionalIf (Bool '"true") $row ) )) (let $filtered_data (FlatMap (Right! $read) $map_lambda)) (let $resulte_data_sink (DataSink '"result")) (let $result (ResWrite! (Left! 
$read) $resulte_data_sink (Key) $filtered_data '('('type)))) (return (Commit! $result $resulte_data_sink)) ) 2025-07-08 11:54:40.776 DEBUG yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (Read! world (DataSource '"generic" '"test_cluster") (MrTableConcat (Key '('table (String '"test_table")))))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($4) (OptionalIf (Bool '"true") $4))) '('('type)))) (return (Commit! $3 $2)) ) 2025-07-08 11:54:40.777 DEBUG yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (Read! world (DataSource '"generic" '"test_cluster") (MrTableConcat (Key '('table (String '"test_table")))))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($4) (OptionalIf (Bool '"true") $4))) '('('type)))) (return (Commit! $3 $2)) ) 2025-07-08 11:54:40.777 DEBUG yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [generic] yql_generic_io_discovery.cpp:55: discovered cluster name: test_cluster 2025-07-08 11:54:40.777 INFO yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [generic] yql_generic_load_meta.cpp:90: Loading table meta for: `test_cluster`.`test_table` 2025-07-08 11:54:40.777 DEBUG yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (Bool '"true") $5))) '('('type)))) (return (Commit! $3 $2)) ) 2025-07-08 11:54:40.778 DEBUG yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (Bool '"true") $5))) '('('type)))) (return (Commit! $3 $2)) ) 2025-07-08 11:54:40.778 DEBUG yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (Bool '"true") $5))) '('('type)))) (return (Commit! $3 $2)) ) 2025-07-08 11:54:40.778 DEBUG yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [core] yql_out_transformers.cpp:62: Expr to optimize: ( (let $1 (Bool '"true")) (let $2 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($5) $1))) (let $3 (DataSink '"result")) (let $4 (ResWrite! (Left! $2) $3 (Key) (FlatMap (Right! $2) (lambda '($6) (OptionalIf $1 $6))) '('('type)))) (return (Commit! 
$4 $3)) ) 2025-07-08 11:54:40.778 DEBUG yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [core] yql_co_simple1.cpp:978: OptionalIf over Bool 'true 2025-07-08 11:54:40.778 DEBUG yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (Just $5))) '('('type)))) (return (Commit! $3 $2)) ) 2025-07-08 11:54:40.778 DEBUG yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (Just $5))) '('('type)))) (return (Commit! $3 $2)) ) 2025-07-08 11:54:40.778 DEBUG yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [core] yql_out_transformers.cpp:62: Expr to optimize: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (Just $5))) '('('type)))) (return (Commit! $3 $2)) ) 2025-07-08 11:54:40.778 DEBUG yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [core] yql_co_simple1.cpp:2031: FlatMap with Just 2025-07-08 11:54:40.778 DEBUG yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (Right! $1) '('('type)))) (return (Commit! $3 $2)) ) 2025-07-08 11:54:40.778 DEBUG yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [core] yql_out_transformers.cpp:62: Expr to optimize: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (Right! $1) '('('type)))) (return (Commit! $3 $2)) ) 2025-07-08 11:54:40.779 INFO yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [generic] yql_optimize.cpp:135: PhysicalOptimizer-TrimReadWorld 2025-07-08 11:54:40.779 DEBUG yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (DataSink '"result")) (let $2 (ResWrite! world $1 (Key) (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($3) (Bool '"true")))) '('('type)))) (return (Commit! $2 $1)) ) 2025-07-08 11:54:40.779 DEBUG yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [core] yql_out_transformers.cpp:62: Expr to optimize: ( (let $1 (DataSink '"result")) (let $2 (ResWrite! world $1 (Key) (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($3) (Bool '"true")))) '('('type)))) (return (Commit! 
$2 $1)) ) 2025-07-08 11:54:40.779 INFO yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [RESULT] yql_result_provider.cpp:773: ResPull 2025-07-08 11:54:40.779 DEBUG yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (DataSink '"result")) (let $2 (ResPull! world $1 (Key) (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($3) (Bool '"true")))) '('('type)) '"generic")) (return (Commit! $2 $1)) ) 2025-07-08 11:54:40.779 DEBUG yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [core] yql_out_transformers.cpp:62: Expr to optimize: ( (let $1 (DataSink '"result")) (let $2 (ResPull! world $1 (Key) (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($3) (Bool '"true")))) '('('type)) '"generic")) (return (Commit! $2 $1)) ) 2025-07-08 11:54:40.779 DEBUG yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [core] yql_out_transformers.cpp:62: Optimized expr: ( (let $1 (DataSink '"result")) (let $2 (ResPull! world $1 (Key) (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($3) (Bool '"true")))) '('('type)) '"generic")) (return (Commit! $2 $1)) ) 2025-07-08 11:54:40.779 INFO yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [generic] yql_generic_dq_integration.cpp:183: Filling source settings: cluster: test_cluster, table: test_table, endpoint: host: "host" port: 42 2025-07-08 11:54:40.780 INFO yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [generic] yql_optimize.cpp:135: BuildGenericDqSourceSettings 2025-07-08 11:54:40.780 DEBUG yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [core] yql_out_transformers.cpp:62: Built settings: ( (let $1 (DataSink '"result")) (let $2 '('"col_bool" '"col_date" '"col_datetime" '"col_double" '"col_dynumber" '"col_float" '"col_int16" '"col_int32" '"col_int64" '"col_int8" '"col_interval" '"col_json" '"col_json_document" '"col_optional_bool" '"col_optional_date" '"col_optional_datetime" '"col_optional_double" '"col_optional_dynumber" '"col_optional_float" '"col_optional_int16" '"col_optional_int32" '"col_optional_int64" '"col_optional_int8" '"col_optional_interval" '"col_optional_json" '"col_optional_json_document" '"col_optional_string" '"col_optional_timestamp" '"col_optional_tz_date" '"col_optional_tz_datetime" '"col_optional_tz_timestamp" '"col_optional_uint16" '"col_optional_uint32" '"col_optional_uint64" '"col_optional_uint8" '"col_optional_utf8" '"col_optional_uuid" '"col_optional_yson" '"col_string" '"col_timestamp" '"col_tz_date" '"col_tz_datetime" '"col_tz_timestamp" '"col_uint16" '"col_uint32" '"col_uint64" '"col_uint8" '"col_utf8" '"col_uuid" '"col_yson")) (let $3 (GenSourceSettings world '"test_cluster" '"test_table" (SecureParam '"cluster:default_test_cluster") $2 (lambda '($32) (Bool '"true")))) (let $4 (DataType 'Bool)) (let $5 (DataType 'Date)) (let $6 (DataType 'Datetime)) (let $7 (DataType 'Double)) (let $8 (DataType 'DyNumber)) (let $9 (DataType 'Float)) (let $10 (DataType 'Int16)) (let $11 (DataType 'Int32)) (let $12 (DataType 'Int64)) (let $13 (DataType 'Int8)) (let $14 (DataType 'Interval)) (let $15 (DataType 'Json)) (let $16 (DataType 'JsonDocument)) (let $17 (DataType 'String)) (let $18 (DataType 'Timestamp)) (let $19 (DataType 
'TzDate)) (let $20 (DataType 'TzDatetime)) (let $21 (DataType 'TzTimestamp)) (let $22 (DataType 'Uint16)) (let $23 (DataType 'Uint32)) (let $24 (DataType 'Uint64)) (let $25 (DataType 'Uint8)) (let $26 (DataType 'Utf8)) (let $27 (DataType 'Uuid)) (let $28 (DataType 'Yson)) (let $29 (StructType '('"col_bool" $4) '('"col_date" $5) '('"col_datetime" $6) '('"col_double" $7) '('"col_dynumber" $8) '('"col_float" $9) '('"col_int16" $10) '('"col_int32" $11) '('"col_int64" $12) '('"col_int8" $13) '('"col_interval" $14) '('"col_json" $15) '('"col_json_document" $16) '('"col_optional_bool" (OptionalType $4)) '('"col_optional_date" (OptionalType $5)) '('"col_optional_datetime" (OptionalType $6)) '('"col_optional_double" (OptionalType $7)) '('"col_optional_dynumber" (OptionalType $8)) '('"col_optional_float" (OptionalType $9)) '('"col_optional_int16" (OptionalType $10)) '('"col_optional_int32" (OptionalType $11)) '('"col_optional_int64" (OptionalType $12)) '('"col_optional_int8" (OptionalType $13)) '('"col_optional_interval" (OptionalType $14)) '('"col_optional_json" (OptionalType $15)) '('"col_optional_json_document" (OptionalType $16)) '('"col_optional_string" (OptionalType $17)) '('"col_optional_timestamp" (OptionalType $18)) '('"col_optional_tz_date" (OptionalT ... right_value { column: "col_optional_utf8" } } } 2025-07-08 11:54:40.945 INFO yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [generic] yql_generic_settings.cpp:38: GenericConfiguration::AddCluster: name = test_cluster, kind = POSTGRESQL, database name = database, database id = , endpoint = { host: "host" port: 42 }, use tls = 0, protocol = NATIVE Initial program: ( (let $data_source (DataSource '"generic" '"test_cluster")) (let $empty_lambda (lambda '($arg) (Bool '"true"))) (let $table (MrTableConcat (Key '('table (String '"test_table")))) ) (let $read (Read! world $data_source $table)) (let $map_lambda (lambda '($row) (OptionalIf (!= (Member $row '"col_string") (String '"value") ) $row ) )) (let $filtered_data (FlatMap (Right! $read) $map_lambda)) (let $resulte_data_sink (DataSink '"result")) (let $result (ResWrite! (Left! $read) $resulte_data_sink (Key) $filtered_data '('('type)))) (return (Commit! $result $resulte_data_sink)) ) 2025-07-08 11:54:40.957 DEBUG yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (Read! world (DataSource '"generic" '"test_cluster") (MrTableConcat (Key '('table (String '"test_table")))))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($4) (OptionalIf (!= (Member $4 '"col_string") (String '"value")) $4))) '('('type)))) (return (Commit! $3 $2)) ) 2025-07-08 11:54:40.957 DEBUG yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (Read! world (DataSource '"generic" '"test_cluster") (MrTableConcat (Key '('table (String '"test_table")))))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($4) (OptionalIf (!= (Member $4 '"col_string") (String '"value")) $4))) '('('type)))) (return (Commit! 
$3 $2)) ) 2025-07-08 11:54:40.957 DEBUG yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [generic] yql_generic_io_discovery.cpp:55: discovered cluster name: test_cluster 2025-07-08 11:54:40.957 INFO yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [generic] yql_generic_load_meta.cpp:90: Loading table meta for: `test_cluster`.`test_table` 2025-07-08 11:54:40.957 DEBUG yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (!= (Member $5 '"col_string") (String '"value")) $5))) '('('type)))) (return (Commit! $3 $2)) ) 2025-07-08 11:54:40.957 DEBUG yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (!= (Member $5 '"col_string") (String '"value")) $5))) '('('type)))) (return (Commit! $3 $2)) ) 2025-07-08 11:54:40.957 DEBUG yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (!= (Member $5 '"col_string") (String '"value")) $5))) '('('type)))) (return (Commit! $3 $2)) ) 2025-07-08 11:54:40.957 DEBUG yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [core] yql_out_transformers.cpp:62: Expr to optimize: ( (let $1 (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (Bool '"true")))) (let $2 (DataSink '"result")) (let $3 (ResWrite! (Left! $1) $2 (Key) (FlatMap (Right! $1) (lambda '($5) (OptionalIf (!= (Member $5 '"col_string") (String '"value")) $5))) '('('type)))) (return (Commit! $3 $2)) ) 2025-07-08 11:54:40.958 INFO yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [generic] yql_optimize.cpp:135: PhysicalOptimizer-TrimReadWorld 2025-07-08 11:54:40.958 INFO yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [default] physical_opt.cpp:76: Push filter lambda: ( (return (lambda '($1) (!= (Member $1 '"col_string") (String '"value")))) ) 2025-07-08 11:54:40.958 INFO yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [generic] yql_optimize.cpp:135: PhysicalOptimizer-PushFilterToReadTable 2025-07-08 11:54:40.958 DEBUG yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (DataSink '"result")) (let $2 (String '"value")) (let $3 (ResWrite! world $1 (Key) (FlatMap (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (!= (Member $4 '"col_string") $2)))) (lambda '($5) (OptionalIf (!= (Member $5 '"col_string") $2) $5))) '('('type)))) (return (Commit! 
$3 $1)) ) 2025-07-08 11:54:40.958 DEBUG yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [core] yql_out_transformers.cpp:62: Expr: ( (let $1 (DataSink '"result")) (let $2 (String '"value")) (let $3 (ResWrite! world $1 (Key) (FlatMap (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (!= (Member $4 '"col_string") $2)))) (lambda '($5) (OptionalIf (!= (Member $5 '"col_string") $2) $5))) '('('type)))) (return (Commit! $3 $1)) ) 2025-07-08 11:54:40.958 DEBUG yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [core] yql_out_transformers.cpp:62: Expr to optimize: ( (let $1 (DataSink '"result")) (let $2 (String '"value")) (let $3 (ResWrite! world $1 (Key) (FlatMap (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (!= (Member $4 '"col_string") $2)))) (lambda '($5) (OptionalIf (!= (Member $5 '"col_string") $2) $5))) '('('type)))) (return (Commit! $3 $1)) ) 2025-07-08 11:54:40.958 TRACE yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [generic] yql_generic_physical_opt.cpp:136: Push filter. Lambda is already not empty 2025-07-08 11:54:40.958 DEBUG yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [core] yql_out_transformers.cpp:62: Optimized expr: ( (let $1 (DataSink '"result")) (let $2 (String '"value")) (let $3 (ResWrite! world $1 (Key) (FlatMap (Right! (GenReadTable! world (DataSource '"generic" '"test_cluster") (GenTable '"test_table") (Void) (lambda '($4) (!= (Member $4 '"col_string") $2)))) (lambda '($5) (OptionalIf (!= (Member $5 '"col_string") $2) $5))) '('('type)))) (return (Commit! $3 $1)) ) 2025-07-08 11:54:40.958 INFO yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [generic] yql_generic_dq_integration.cpp:183: Filling source settings: cluster: test_cluster, table: test_table, endpoint: host: "host" port: 42 2025-07-08 11:54:40.959 INFO yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [generic] yql_optimize.cpp:135: BuildGenericDqSourceSettings 2025-07-08 11:54:40.959 DEBUG yql-providers-generic-provider-ut-pushdown(pid=53357, tid=0x00007FB608866A40) [core] yql_out_transformers.cpp:62: Built settings: ( (let $1 (DataSink '"result")) (let $2 '('"col_bool" '"col_date" '"col_datetime" '"col_double" '"col_dynumber" '"col_float" '"col_int16" '"col_int32" '"col_int64" '"col_int8" '"col_interval" '"col_json" '"col_json_document" '"col_optional_bool" '"col_optional_date" '"col_optional_datetime" '"col_optional_double" '"col_optional_dynumber" '"col_optional_float" '"col_optional_int16" '"col_optional_int32" '"col_optional_int64" '"col_optional_int8" '"col_optional_interval" '"col_optional_json" '"col_optional_json_document" '"col_optional_string" '"col_optional_timestamp" '"col_optional_tz_date" '"col_optional_tz_datetime" '"col_optional_tz_timestamp" '"col_optional_uint16" '"col_optional_uint32" '"col_optional_uint64" '"col_optional_uint8" '"col_optional_utf8" '"col_optional_uuid" '"col_optional_yson" '"col_string" '"col_timestamp" '"col_tz_date" '"col_tz_datetime" '"col_tz_timestamp" '"col_uint16" '"col_uint32" '"col_uint64" '"col_uint8" '"col_utf8" '"col_uuid" '"col_yson")) (let $3 (String '"value")) (let $4 (GenSourceSettings world '"test_cluster" '"test_table" (SecureParam '"cluster:default_test_cluster") $2 (lambda '($33) (!= (Member $33 '"col_string") $3)))) (let $5 (DataType 'Bool)) (let $6 
(DataType 'Date)) (let $7 (DataType 'Datetime)) (let $8 (DataType 'Double)) (let $9 (DataType 'DyNumber)) (let $10 (DataType 'Float)) (let $11 (DataType 'Int16)) (let $12 (DataType 'Int32)) (let $13 (DataType 'Int64)) (let $14 (DataType 'Int8)) (let $15 (DataType 'Interval)) (let $16 (DataType 'Json)) (let $17 (DataType 'JsonDocument)) (let $18 (DataType 'String)) (let $19 (DataType 'Timestamp)) (let $20 (DataType 'TzDate)) (let $21 (DataType 'TzDatetime)) (let $22 (DataType 'TzTimestamp)) (let $23 (DataType 'Uint16)) (let $24 (DataType 'Uint32)) (let $25 (DataType 'Uint64)) (let $26 (DataType 'Uint8)) (let $27 (DataType 'Utf8)) (let $28 (DataType 'Uuid)) (let $29 (DataType 'Yson)) (let $30 (StructType '('"col_bool" $5) '('"col_date" $6) '('"col_datetime" $7) '('"col_double" $8) '('"col_dynumber" $9) '('"col_float" $10) '('"col_int16" $11) '('"col_int32" $12) '('"col_int64" $13) '('"col_int8" $14) '('"col_interval" $15) '('"col_json" $16) '('"col_json_document" $17) '('"col_optional_bool" (OptionalType $5)) '('"col_optional_date" (OptionalType $6)) '('"col_optional_datetime" (OptionalType $7)) '('"col_optional_double" (OptionalType $8)) '('"col_optional_dynumber" (OptionalType $9)) '('"col_optional_float" (OptionalType $10)) '('"col_optional_int16" (OptionalType $11)) '('"col_optional_int32" (OptionalType $12)) '('"col_optional_int64" (OptionalType $13)) '('"col_optional_int8" (OptionalType $14)) '('"col_optional_interval" (OptionalType $15)) '('"col_optional_json" (OptionalType $16)) '('"col_optional_json_document" (OptionalType $17)) '('"col_optional_string" (OptionalType $18)) '('"col_optional_timestamp" (OptionalType $19)) '('"col_optional_tz_date" (OptionalType $20)) '('"col_optional_tz_datetime" (OptionalType $21)) '('"col_optional_tz_timestamp" (OptionalType $22)) '('"col_optional_uint16" (OptionalType $23)) '('"col_optional_uint32" (OptionalType $24)) '('"col_optional_uint64" (OptionalType $25)) '('"col_optional_uint8" (OptionalType $26)) '('"col_optional_utf8" (OptionalType $27)) '('"col_optional_uuid" (OptionalType $28)) '('"col_optional_yson" (OptionalType $29)) '('"col_string" $18) '('"col_timestamp" $19) '('"col_tz_date" $20) '('"col_tz_datetime" $21) '('"col_tz_timestamp" $22) '('"col_uint16" $23) '('"col_uint32" $24) '('"col_uint64" $25) '('"col_uint8" $26) '('"col_utf8" $27) '('"col_uuid" $28) '('"col_yson" $29))) (let $31 (DqSourceWrap $4 (DataSource '"generic" '"test_cluster") $30)) (let $32 (ResWrite! world $1 (Key) (FlatMap $31 (lambda '($34) (OptionalIf (!= (Member $34 '"col_string") $3) $34))) '('('type)))) (return (Commit! $32 $1)) ) Dq source filter settings: filter_typed { comparison { operation: NE left_value { column: "col_string" } right_value { typed_value { type { type_id: STRING } value { bytes_value: "value" } } } } } >> TYardTest::TestUpsAndDownsAtTheBoundary [GOOD] >> TYardTest::TestUnflushedChunk >> replay::import_test [GOOD] |82.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_olap_ut.cpp |82.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_login_ut.cpp |82.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/static_validator/ut/example_configs/unittest >> StaticConfigExamples::MIRROR_3_DC_9_NODES [GOOD] |82.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_scripting_ut.cpp |82.4%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/test-results/unittest/{meta.json ... 
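Note: the filter_typed block printed above is the serialized form of the pushed-down predicate col_string != "value" from the test's initial program. A user-level YQL query of roughly the shape this test models (illustrative only; test_cluster and test_table are the test fixtures named in the log, not a real deployment) would be:

    USE test_cluster;
    SELECT * FROM test_table WHERE col_string != "value";

As the "Built settings" expression shows, after PhysicalOptimizer-PushFilterToReadTable the predicate is also attached to the read itself (the lambda inside GenSourceSettings), so the external connector can evaluate it at the source.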
results_accumulator.log} |82.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsOther2::ChaoticParallelWrite_SkeletonFrontQueuesOverload [GOOD] |82.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_coordination_ut.cpp |82.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_stats_ut.cpp |82.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_read_rows_ut.cpp |82.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/workload/tpch/ut/unittest >> TpchQueries::ScaleFactor [GOOD] |82.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/validation/auth_config_validator_ut/unittest >> AuthConfigValidation::CannotAcceptInvalidAccountLockoutConfig [GOOD] |82.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_olapstore_ut.cpp |82.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_monitoring_ut.cpp >> TYardTest::TestUnflushedChunk [GOOD] >> TYardTest::TestRedZoneSurvivability |82.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_object_storage_ut.cpp |82.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_query_ut.cpp |82.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_import_ut.cpp |82.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_ldap_login_ut.cpp |82.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/ydb_serializable/replay/import_test >> replay::import_test [GOOD] |82.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |82.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_register_node_ut.cpp |82.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_index_table_ut.cpp |82.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_logstore_ut.cpp >> test.py::test[solomon-BasicExtractMembers-default.txt] [GOOD] >> test.py::test[solomon-Downsampling-default.txt] >> TYardTest::TestDamagedFirstRecordToKeep [GOOD] |82.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_ut.cpp |82.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut >> TYardTest::TestDamageAtTheBoundary |82.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut >> TYardTest::TestRedZoneSurvivability [GOOD] >> TYardTest::TestSlay >> ydb-tests-functional-compatibility::import_test [GOOD] >> TBsVDiskDefrag::Defrag50PercentGarbage [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingKeyFresh >> TYardTest::TestSlay [GOOD] >> TYardTest::TestSlayRace |82.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_ut.cpp |82.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_table_ut.cpp |82.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/ydb-tests-functional-config >> TYardTest::TestSlayRace [GOOD] >> TYardTest::TestSlayRecreate |82.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |82.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/actors/interconnect/ut_huge_cluster/ydb-library-actors-interconnect-ut_huge_cluster >> TYardTest::TestSlayRecreate [GOOD] >> TYardTest::TestSlayLogWriteRaceActor >> ydb-tests-functional-sqs-cloud::import_test [GOOD] |82.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/compatibility/import_test >> ydb-tests-functional-compatibility::import_test [GOOD] |82.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut |82.3%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |82.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |82.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer >> test.py::test[solomon-Downsampling-default.txt] [GOOD] >> test.py::test[solomon-DownsamplingValidSettings-default.txt] >> TBsVDiskExtreme::Simple3Put1GetMissingKeyFresh [GOOD] >> TBsVDiskExtreme::Simple3Put1GetMissingKeyCompaction |82.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/import_test >> ydb-tests-functional-sqs-cloud::import_test [GOOD] |82.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots >> TYardTest::TestSlayLogWriteRaceActor [GOOD] >> TYardTest::TestMultiYardHarakiri >> TBsVDiskGC::TGCManyVPutsDelTabletTest [GOOD] >> TBsVDiskManyPutGet::ManyPutGet |82.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/validator/ut/validator_checks/yaml_config-validator-ut-validator_checks |82.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/scheme_board/ut_double_indexed/ydb-core-tx-scheme_board-ut_double_indexed |82.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/driver_lib/version/ut/ydb-core-driver_lib-version-ut >> TYardTest::TestLogWriteCutEqual [GOOD] >> TYardTest::TestLogWriteCutEqualRandomWait >> SchedulerActor::MediumEvents [GOOD] >> TSectorMapPerformance::TestSSD1960GBWrite1000MBOnFirstSector [GOOD] >> TYardTest::Test3AsyncLog >> HugeCluster::AllToAll >> ydb-tests-functional-minidumps::import_test [GOOD] >> TYardTest::Test3AsyncLog [GOOD] >> TYardTest::Test3HugeAsyncLog |82.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/actors/http/ut/ydb-library-actors-http-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/actors/core/ut/unittest >> SchedulerActor::MediumEvents [GOOD] Test command err: ... there have been 19 switches to consumer mode |82.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk >> test.py::test[solomon-DownsamplingValidSettings-default.txt] [GOOD] >> test.py::test[solomon-HistResponse-default.txt] >> TBsVDiskExtreme::Simple3Put1GetMissingKeyCompaction [GOOD] |82.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |82.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |82.3%| [TA] $(B)/ydb/core/blobstorage/vdisk/query/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TYardTest::TestMultiYardHarakiri [GOOD] >> TYardTest::TestStartingPointReboots |82.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |82.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |82.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/minidumps/import_test >> ydb-tests-functional-minidumps::import_test [GOOD] |82.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut >> Checks::ErrorInCheck [GOOD] >> Checks::MapValidation [GOOD] >> Checks::BasicStringChecks [GOOD] >> Checks::OpaqueMaps [GOOD] >> Checks::IntArrayValidation [GOOD] >> Checks::BasicIntChecks [GOOD] >> DynamicProxy::RaceCheck1 [GOOD] >> DynamicProxy::RaceCheck10 >> YdbVersion::DefaultSameVersion [GOOD] >> OldFormat::OldNbs [GOOD] >> YdbVersion::StoredReadableBy [GOOD] >> YdbVersion::DefaultPrevMajor [GOOD] >> YdbVersion::CurrentCanLoadFrom [GOOD] >> YdbVersion::OldNbsStored [GOOD] >> OldFormat::UnexpectedTrunk [GOOD] >> YdbVersion::DefaultHotfix [GOOD] >> YdbVersion::DefaultPrevYear [GOOD] >> YdbVersion::CurrentCanLoadFromIncompatible [GOOD] >> YdbVersion::StoredWithRules [GOOD] >> VersionParser::Basic [GOOD] >> YdbVersion::TrunkAndStable [GOOD] >> OldFormat::TooOld [GOOD] >> YdbVersion::NewNbsIncompatibleCurrent [GOOD] >> YdbVersion::DifferentYdbVersionsWithNBSRules [GOOD] >> DoubleIndexedTests::TestUpsertByBothKeys [GOOD] >> YdbVersion::Component [GOOD] >> YdbVersion::WithPatchAndWithoutPatch [GOOD] >> YdbVersion::CurrentStoresReadableBy [GOOD] >> OldFormat::Trunk [GOOD] >> DoubleIndexedTests::TestFind [GOOD] >> YdbVersion::DefaultOldMajor [GOOD] >> TBsLocalRecovery::StartStopNotEmptyDB [GOOD] >> DoubleIndexedTests::TestMerge [GOOD] >> YdbVersion::DefaultDifferentBuildIncompatible [GOOD] |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut >> TYardTest::TestDamageAtTheBoundary [GOOD] >> TBsLocalRecovery::WriteRestartRead >> YdbVersion::LimitOld [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskExtreme::Simple3Put1GetMissingKeyCompaction [GOOD] >> DoubleIndexedTests::TestUpsertBySingleKey [GOOD] >> TYardTest::TestDestroySystem Test command err: 2025-07-08T11:54:29.220864Z :BS_VDISK_PUT ERROR: VDISK[0:_:0:0:0]: TEvVPut: TabletID cannot be empty; id# [0:1:10:0:0:10:1] Marker# BSVS43 2025-07-08T11:54:29.921464Z :BS_VDISK_OTHER ERROR: VDISK[0:_:0:0:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'PDiskId# 1 TEvLog error because PDisk State# Error, there is a terminal internal error in PDisk. Did you check EvYardInit result? Marker# BSY07 StateErrorReason# PDisk is in StateError, reason# Received TEvYardControl::Brake' 2025-07-08T11:54:29.921494Z :BS_VDISK_OTHER ERROR: VDISK[0:_:0:0:1]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'IsValid' ErrorReason# '' 2025-07-08T11:54:29.921502Z :BS_SKELETON ERROR: VDISK[0:_:0:0:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# PDiskId# 1 TEvLog error because PDisk State# Error, there is a terminal internal error in PDisk. Did you check EvYardInit result? 
Marker# BSY07 StateErrorReason# PDisk is in StateError, reason# Received TEvYardControl::Brake Marker# BSVSF03 2025-07-08T11:54:29.921510Z :BS_VDISK_OTHER ERROR: VDISK[0:_:0:2:1]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'IsValid' ErrorReason# '' 2025-07-08T11:54:29.921509Z :BS_VDISK_OTHER ERROR: VDISK[0:_:0:0:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'IsValid' ErrorReason# '' 2025-07-08T11:54:29.921515Z :BS_VDISK_OTHER ERROR: VDISK[0:_:0:2:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'IsValid' ErrorReason# '' 2025-07-08T11:54:29.921516Z :BS_SKELETON ERROR: VDISK[0:_:0:0:1]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites Marker# BSVSF03 2025-07-08T11:54:29.921519Z :BS_VDISK_OTHER ERROR: VDISK[0:_:0:1:1]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'IsValid' ErrorReason# '' 2025-07-08T11:54:29.921522Z :BS_VDISK_OTHER ERROR: VDISK[0:_:0:1:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'IsValid' ErrorReason# '' 2025-07-08T11:54:29.921524Z :BS_SKELETON ERROR: VDISK[0:_:0:0:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# PDiskId# 1 TEvLog error because PDisk State# Error, there is a terminal internal error in PDisk. Did you check EvYardInit result? Marker# BSY07 StateErrorReason# PDisk is in StateError, reason# Received TEvYardControl::Brake Marker# BSVSF03 2025-07-08T11:54:29.921527Z :BS_VDISK_OTHER ERROR: VDISK[0:_:0:3:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'IsValid' ErrorReason# '' 2025-07-08T11:54:29.921528Z :BS_VDISK_OTHER ERROR: VDISK[0:_:0:3:1]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'IsValid' ErrorReason# '' 2025-07-08T11:54:29.921529Z :BS_SKELETON ERROR: VDISK[0:_:0:1:1]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites Marker# BSVSF03 2025-07-08T11:54:29.921535Z :BS_SKELETON ERROR: VDISK[0:_:0:2:1]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites Marker# BSVSF03 2025-07-08T11:54:29.921540Z :BS_SKELETON ERROR: VDISK[0:_:0:2:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites Marker# BSVSF03 2025-07-08T11:54:29.921547Z :BS_SKELETON ERROR: VDISK[0:_:0:1:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites Marker# BSVSF03 2025-07-08T11:54:29.921557Z :BS_SKELETON ERROR: VDISK[0:_:0:3:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites Marker# BSVSF03 2025-07-08T11:54:29.921561Z :BS_SKELETON ERROR: VDISK[0:_:0:3:1]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites Marker# BSVSF03 >> OldFormat::DefaultRules [GOOD] |82.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut >> DoubleIndexedTests::TestErase [GOOD] >> YdbVersion::AcceptSpecificHotfixWithoutPatch [GOOD] >> YdbVersion::DefaultNewMajor [GOOD] >> YdbVersion::ForbiddenMinor [GOOD] >> TBsVDiskManyPutGet::ManyPutGet [GOOD] >> YdbVersion::DefaultNextYear [GOOD] >> YdbVersion::DefaultRulesWithExtraForbidden [GOOD] >> YdbVersion::DefaultDifferentBuild [GOOD] >> YdbVersion::OldNbsIncompatibleStored [GOOD] >> YdbVersion::ExtraAndForbidden [GOOD] >> TBsVDiskManyPutGet::ManyMultiSinglePutGet >> YdbVersion::OtherComponent [GOOD] >> YdbVersion::PrintCurrentVersionProto [GOOD] >> YdbVersion::CurrentCanLoadFromAllOlder [GOOD] >> YdbVersion::SomeRulesAndOtherForbidden [GOOD] >> YdbVersion::StoredReadableByIncompatible 
[GOOD] >> YdbVersion::YDBAndNbs [GOOD] >> YdbVersion::DefaultCompatible [GOOD] >> OldFormat::SameVersion [GOOD] >> YdbVersion::LimitNew [GOOD] >> YdbVersion::TrunkYDBAndNbs [GOOD] >> YdbVersion::NewNbsCurrent [GOOD] >> YdbVersion::CompatibleWithSelf [GOOD] >> YdbVersion::OneAcceptedVersion [GOOD] >> OldFormat::PrevYear [GOOD] >> YdbVersion::StoredWithRulesIncompatible [GOOD] >> YdbVersion::DefaultNextMajor [GOOD] |82.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/ydb-library-benchmarks-runner |82.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |82.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |82.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |82.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |82.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr >> TYardTest::Test3HugeAsyncLog [GOOD] >> TYardTest::TestAllocateAllChunks >> TYardTest::TestDestroySystem [GOOD] >> TYardTest::TestCutMultipleLogChunks |81.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/formats/arrow/accessor/composite/ut/ydb-core-formats-arrow-accessor-composite-ut |81.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/ut/ydb-core-client-ut |81.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/validator/ut/validator_checks/unittest >> Checks::BasicIntChecks [GOOD] |81.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_double_indexed/unittest >> DoubleIndexedTests::TestErase [GOOD] |81.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move >> TYardTest::TestAllocateAllChunks [GOOD] >> test.py::test[solomon-HistResponse-default.txt] [GOOD] >> test.py::test[solomon-InvalidProject-] >> HttpProxy::BasicParsing [GOOD] >> HttpProxy::HeaderParsingError_Request [GOOD] >> HttpProxy::HeaderParsingError_Response [GOOD] >> HttpProxy::GetWithSpecifiedContentType [GOOD] >> HttpProxy::BasicParsingChunkedBodyRequest [GOOD] >> HttpProxy::BasicPost [GOOD] >> HttpProxy::BasicParsingChunkedBodyResponse [GOOD] >> HttpProxy::InvalidParsingChunkedBody [GOOD] >> HttpProxy::AdvancedParsingChunkedBody [GOOD] >> HttpProxy::CreateCompressedResponse [GOOD] >> HttpProxy::BasicPartialParsing [GOOD] >> HttpProxy::BasicPartialParsingChunkedBody [GOOD] >> HttpProxy::BasicParsingContentLength0 [GOOD] >> HttpProxy::AdvancedParsing [GOOD] >> HttpProxy::AdvancedPartialParsing [GOOD] >> HttpProxy::BasicRenderBodyWithHeadersAndCookies [GOOD] >> HttpProxy::BasicRenderOutgoingResponse [GOOD] >> HttpProxy::BasicRunning4 [GOOD] >> HttpProxy::BasicRunning6 |81.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source >> HttpProxy::BasicRunning6 [GOOD] >> TYardTest::TestCutMultipleLogChunks [GOOD] >> TYardTest::TestDestructionWhileWritingChunk >> HttpProxy::TlsRunning [GOOD] >> HttpProxy::TooLongHeader [GOOD] >> HttpProxy::HeaderWithoutAColon ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/version/ut/unittest >> YdbVersion::DefaultNextMajor [GOOD] Test command err: Application: "ydb" Version { Year: 25 Major: 1 } CanLoadFrom { LowerLimit { Year: 24 Major: 4 } UpperLimit { Year: 25 Major: 1 } } StoresReadableBy { LowerLimit { Year: 24 Major: 4 } UpperLimit { Year: 25 Major: 1 } } CanConnectTo { 
LowerLimit { Year: 24 Major: 4 } UpperLimit { Year: 25 Major: 1 } } CanConnectTo { Application: "nbs" LowerLimit { Year: 24 Major: 3 } UpperLimit { Year: 25 Major: 1 } } >> HttpProxy::HeaderWithoutAColon [GOOD] >> HttpProxy::TooManyRequests [GOOD] >> HttpProxy::ChunkedResponse1 [GOOD] >> HttpProxy::ChunkedResponse2 [GOOD] >> HttpProxy::ChunkedResponse3 [GOOD] >> HttpProxy::RequestAfter307 [GOOD] >> TYardTest::TestDestructionWhileWritingChunk [GOOD] >> TYardTest::TestDestructionWhileReadingChunk >> ExternalDataSourceTest::ValidateName [GOOD] >> ExternalDataSourceTest::ValidatePack [GOOD] >> ExternalDataSourceTest::ValidateAuth [GOOD] >> ExternalDataSourceTest::ValidateParameters [GOOD] >> ExternalDataSourceTest::ValidateHasExternalTable [GOOD] >> ExternalDataSourceTest::ValidateProperties [GOOD] >> ExternalDataSourceTest::ValidateLocation [GOOD] >> ObjectStorageTest::SuccessValidation [GOOD] >> ObjectStorageTest::FailedCreate [GOOD] >> ObjectStorageTest::FailedValidation [GOOD] >> ObjectStorageTest::FailedJsonListValidation [GOOD] >> ObjectStorageTest::FailedOptionalTypeValidation [GOOD] >> ObjectStorageTest::WildcardsValidation [GOOD] >> TYardTest::TestDestructionWhileReadingChunk [GOOD] >> TYardTest::TestDestructionWhileReadingLog |80.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/sequenceshard/public/ut/ydb-core-tx-sequenceshard-public-ut >> TYardTest::TestDestructionWhileReadingLog [GOOD] >> TYardTest::TestFormatInfo [GOOD] >> TYardTest::TestEnormousDisk ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> TYardTest::TestAllocateAllChunks [GOOD] Test command err: 2025-07-08T11:54:44.410329Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:44.410880Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:299} Shutdown OwnerInfo# { PDisk system/log ChunkIds: {} Free ChunkIds: {} PDiskId# 1 2025-07-08T11:54:44.411808Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1 2025-07-08T11:54:44.447748Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:44.447773Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1 2025-07-08T11:54:44.447980Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 9489503970911323650 MagicLogChunk: 12184689480811969112 MagicDataChunk: 1295150619501744433 MagicSysLogChunk: 800785676951182946 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975684427422 (2025-07-08T11:54:44.427422Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:44.449758Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:44.450575Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got 
!restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:44.450795Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:44.451227Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:44.508434Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1720277 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-07-08T11:54:44.529397Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:44.530688Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 9489503970911323650 MagicLogChunk: 12184689480811969112 MagicDataChunk: 1295150619501744433 MagicSysLogChunk: 800785676951182946 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975684427422 (2025-07-08T11:54:44.427422Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:44.531841Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1753995 NonceLog# 1720277 NonceData# 1076828} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-07-08T11:54:44.532552Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:711} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2025-07-08T11:54:44.532571Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 2 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 8192} PDiskId# 1 2025-07-08T11:54:44.532585Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 8192} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:44.532912Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:44.629277Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1817} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2025-07-08T11:54:44.633030Z :BS_PDISK WARN: 
{LR004@blobstorage_pdisk_logreader.cpp:838} PDiskId# 1 LogReader IsInitial# 0 Owner# 3 VDiskId# [0:_:0:0:0] ChunkIdx# 1 SectorIdx# 3 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags outside the LogEndSector LogEndChunkIdx# 1 LogEndSectorIdx# 3 PDiskId# 1 2025-07-08T11:54:44.633063Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 3} PDiskId# 1 2025-07-08T11:54:44.693094Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:44.693310Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 10736339284590244790 MagicNextLogChunkReference: 6169132744083027247 MagicLogChunk: 13705031660348218340 MagicDataChunk: 17750276167091858905 MagicSysLogChunk: 589854040775649491 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975684668968 (2025-07-08T11:54:44.668968Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:44.697013Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:44.698257Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:44.698278Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:44.698740Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:44.794924Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 2074036 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-07-08T11:54:44.814682Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:44.814893Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 10736339284590244790 MagicNextLogChunkReference: 6169132744083027247 MagicLogChunk: 13705031660348218340 MagicDataChunk: 17750276167091858905 MagicSysLogChunk: 589854040775649491 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current 
sizeof: 1168) TimestampUs: 1751975684668968 (2025-07-08T11:54:44.668968Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:44.815971Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1699594 NonceLog# 2074036 NonceData# 1317681} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-07-08T11:54:44.816655Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:711} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2025-07-08T11:54:44.818728Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 3 SectorIdx# 211 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 3 OffsetInChunk# 864256} PDiskId# 1 2025-07-08T11:54:44.818746Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 3 OffsetInChunk# 864256} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:44.818983Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:44.917239Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1817} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2025-07-08T11:54:44.919628Z :BS_PDISK WARN: {LR004@blobstorage_pdisk_logreader.cpp:838} PDiskId# 1 LogReader IsInitial# 0 Owner# 3 VDiskId# [0:_:0:0:0] ChunkIdx# 3 SectorIdx# 212 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags outside the LogEndSector LogEndChunkIdx# 3 LogEndSectorIdx# 212 PDiskId# 1 2025-07-08T11:54:44.919650Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 3} PDiskId# 1 2025-07-08T11:54:45.717748Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:45.717928Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 10390521398576762828 MagicNextLogChunkReference: 5963827261840433720 MagicLogChunk: 359335888703549336 MagicDataChunk: 13834569813061883563 MagicSysLogChunk: 8488318877463390333 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975685688896 (2025-07-08T11:54:45.688896Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:45.719059Z :BS_PDISK NOTICE: 
{BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:45.719688Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:45.719713Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:45.720002Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:45.817083Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1776643 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/actors/http/ut/unittest >> HttpProxy::RequestAfter307 [GOOD] Test command err: 2025-07-08T11:54:46.037079Z :HTTP INFO: Listening on http://[::]:9784 2025-07-08T11:54:46.037173Z :HTTP TRACE: Register handler /test to [4:7524678276989613004:2053] 2025-07-08T11:54:46.037204Z :HTTP DEBUG: Connection created [4:7524678276989613006:2055] 2025-07-08T11:54:46.037213Z :HTTP DEBUG: resolving [::1]:9784 2025-07-08T11:54:46.037227Z :HTTP DEBUG: connecting 2025-07-08T11:54:46.037337Z :HTTP DEBUG: (#9,[::1]:9784) outgoing connection opened 2025-07-08T11:54:46.037340Z :HTTP DEBUG: (#9,[::1]:9784) <- (GET /test) 2025-07-08T11:54:46.037377Z :HTTP DEBUG: (#10,[::1]:35330) incoming connection opened 2025-07-08T11:54:46.037398Z :HTTP DEBUG: (#10,[::1]:35330) -! 
(GET /test) 2025-07-08T11:54:46.037409Z :HTTP DEBUG: (#10,[::1]:35330) <- (400 Bad Request) 2025-07-08T11:54:46.037424Z :HTTP DEBUG: (#10,[::1]:35330) Request: GET /test HTTP/1.1 Host: [::1]:9784 Accept: */* Connection: close XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX 2025-07-08T11:54:46.037429Z :HTTP DEBUG: (#10,[::1]:35330) Response: HTTP/1.1 400 Bad Request Connection: close Content-Type: text/plain Content-Length: 19 Invalid http header 2025-07-08T11:54:46.037450Z :HTTP DEBUG: (#10,[::1]:35330) connection closed 2025-07-08T11:54:46.037490Z :HTTP DEBUG: (#9,[::1]:9784) -> (400 Bad Request) 2025-07-08T11:54:46.037495Z :HTTP DEBUG: (#9,[::1]:9784) connection closed 2025-07-08T11:54:46.037546Z :HTTP DEBUG: Connection closed [4:7524678276989613006:2055] 2025-07-08T11:54:46.082849Z :HTTP INFO: Listening on http://[::]:14220 2025-07-08T11:54:46.083159Z :HTTP DEBUG: Connection created [7:7524678276834481896:2055] 2025-07-08T11:54:46.083179Z :HTTP DEBUG: resolving 127.0.0.1:14220 2025-07-08T11:54:46.083194Z :HTTP DEBUG: connecting 2025-07-08T11:54:46.083298Z :HTTP DEBUG: (#9,127.0.0.1:14220) outgoing connection opened 2025-07-08T11:54:46.083303Z :HTTP DEBUG: (#9,127.0.0.1:14220) <- (GET /test) 2025-07-08T11:54:46.083320Z :HTTP DEBUG: (#10,[::ffff:127.0.0.1]:49360) incoming connection opened 2025-07-08T11:54:46.083408Z :HTTP DEBUG: (#10,[::ffff:127.0.0.1]:49360) -> (GET /test) 2025-07-08T11:54:46.083494Z :HTTP DEBUG: (#10,[::ffff:127.0.0.1]:49360) <- (200 Found) (incomplete) 2025-07-08T11:54:46.083551Z :HTTP DEBUG: (#10,[::ffff:127.0.0.1]:49360) <- (data chunk 8 bytes) 2025-07-08T11:54:46.083568Z :HTTP DEBUG: (#10,[::ffff:127.0.0.1]:49360) <- (data chunk 8 bytes) 2025-07-08T11:54:46.083574Z :HTTP DEBUG: (#10,[::ffff:127.0.0.1]:49360) <- (data chunk 5 bytes) 2025-07-08T11:54:46.083613Z :HTTP DEBUG: (#9,127.0.0.1:14220) -> (200 Found) 2025-07-08T11:54:46.083627Z :HTTP DEBUG: (#9,127.0.0.1:14220) connection closed 2025-07-08T11:54:46.083676Z :HTTP DEBUG: Connection closed [7:7524678276834481896:2055] 2025-07-08T11:54:46.083682Z :HTTP DEBUG: (#10,[::ffff:127.0.0.1]:49360) connection closed 2025-07-08T11:54:46.101898Z :HTTP INFO: Listening on http://[::]:12423 2025-07-08T11:54:46.102008Z :HTTP DEBUG: Connection created [8:7524678276093136314:2055] 2025-07-08T11:54:46.102041Z :HTTP DEBUG: resolving 127.0.0.1:12423 2025-07-08T11:54:46.102063Z :HTTP DEBUG: connecting 2025-07-08T11:54:46.102206Z :HTTP DEBUG: (#9,127.0.0.1:12423) outgoing connection opened 2025-07-08T11:54:46.102211Z :HTTP DEBUG: (#9,127.0.0.1:12423) <- (GET /test) 2025-07-08T11:54:46.102244Z :HTTP DEBUG: (#10,[::ffff:127.0.0.1]:57044) incoming 
connection opened 2025-07-08T11:54:46.102266Z :HTTP DEBUG: (#10,[::ffff:127.0.0.1]:57044) -> (GET /test) 2025-07-08T11:54:46.102361Z :HTTP DEBUG: (#10,[::ffff:127.0.0.1]:57044) <- (200 Found) (incomplete) 2025-07-08T11:54:46.102404Z :HTTP DEBUG: (#10,[::ffff:127.0.0.1]:57044) <- (data chunk 8 bytes) 2025-07-08T11:54:46.102426Z :HTTP DEBUG: (#10,[::ffff:127.0.0.1]:57044) <- (data chunk 13 bytes, final) 2025-07-08T11:54:46.102446Z :HTTP DEBUG: (#10,[::ffff:127.0.0.1]:57044) connection closed 2025-07-08T11:54:46.102454Z :HTTP DEBUG: (#9,127.0.0.1:12423) -> (200 Found) 2025-07-08T11:54:46.102461Z :HTTP DEBUG: (#9,127.0.0.1:12423) connection closed 2025-07-08T11:54:46.102546Z :HTTP DEBUG: Connection closed [8:7524678276093136314:2055] 2025-07-08T11:54:46.109066Z :HTTP INFO: Listening on http://[::]:11253 2025-07-08T11:54:46.109211Z :HTTP DEBUG: Connection created [9:7524678277025003720:2055] 2025-07-08T11:54:46.109241Z :HTTP DEBUG: resolving 127.0.0.1:11253 2025-07-08T11:54:46.109259Z :HTTP DEBUG: connecting 2025-07-08T11:54:46.109391Z :HTTP DEBUG: (#9,127.0.0.1:11253) outgoing connection opened 2025-07-08T11:54:46.109395Z :HTTP DEBUG: (#9,127.0.0.1:11253) <- (GET /test) 2025-07-08T11:54:46.109415Z :HTTP DEBUG: (#10,[::ffff:127.0.0.1]:40740) incoming connection opened 2025-07-08T11:54:46.109468Z :HTTP DEBUG: (#10,[::ffff:127.0.0.1]:40740) -> (GET /test) 2025-07-08T11:54:46.109609Z :HTTP DEBUG: (#10,[::ffff:127.0.0.1]:40740) <- (200 Found) (incomplete) 2025-07-08T11:54:46.109664Z :HTTP DEBUG: (#10,[::ffff:127.0.0.1]:40740) <- (data chunk 8 bytes) 2025-07-08T11:54:46.109687Z :HTTP DEBUG: (#10,[::ffff:127.0.0.1]:40740) <- (data chunk 8 bytes) 2025-07-08T11:54:46.109706Z :HTTP ERROR: (#10,[::ffff:127.0.0.1]:40740) connection closed - DataChunk error: error 2025-07-08T11:54:46.109787Z :HTTP ERROR: (#9,127.0.0.1:11253) connection closed with error: ConnectionClosed 2025-07-08T11:54:46.109913Z :HTTP DEBUG: Connection closed [9:7524678277025003720:2055] 2025-07-08T11:54:46.116575Z :HTTP INFO: Listening on http://[::]:31407 2025-07-08T11:54:46.116629Z :HTTP TRACE: Register handler /test1 to [10:7524678276271080231:2053] 2025-07-08T11:54:46.116632Z :HTTP TRACE: Register handler /test2 to [10:7524678276271080231:2053] 2025-07-08T11:54:46.116639Z :HTTP DEBUG: Creating a new connection for destination http://127.0.0.1:31407 2025-07-08T11:54:46.116642Z :HTTP DEBUG: Connection created [10:7524678276271080233:2055] 2025-07-08T11:54:46.116645Z :HTTP DEBUG: resolving 127.0.0.1:31407 2025-07-08T11:54:46.116655Z :HTTP DEBUG: connecting 2025-07-08T11:54:46.116756Z :HTTP DEBUG: (#9,127.0.0.1:31407) outgoing connection opened 2025-07-08T11:54:46.116758Z :HTTP DEBUG: (#9,127.0.0.1:31407) <- (GET /test1) 2025-07-08T11:54:46.121090Z :HTTP DEBUG: (#10,[::ffff:127.0.0.1]:36752) incoming connection opened 2025-07-08T11:54:46.121118Z :HTTP DEBUG: (#10,[::ffff:127.0.0.1]:36752) -> (GET /test1) 2025-07-08T11:54:46.121180Z :HTTP DEBUG: (#10,[::ffff:127.0.0.1]:36752) <- (307 Temporary Redirect) 2025-07-08T11:54:46.121246Z :HTTP DEBUG: (#9,127.0.0.1:31407) -> (307 Temporary Redirect) 2025-07-08T11:54:46.121251Z :HTTP DEBUG: (#9,127.0.0.1:31407) connection available for reuse 2025-07-08T11:54:46.121268Z :HTTP DEBUG: Connection [10:7524678276271080233:2055] available for destination http://127.0.0.1:31407 2025-07-08T11:54:46.121279Z :HTTP DEBUG: Reusing connection [10:7524678276271080233:2055] for destination http://127.0.0.1:31407 2025-07-08T11:54:46.121286Z :HTTP DEBUG: (#9,127.0.0.1:31407) <- (GET /test2) 
2025-07-08T11:54:46.121310Z :HTTP DEBUG: (#10,[::ffff:127.0.0.1]:36752) -> (GET /test2) 2025-07-08T11:54:46.121337Z :HTTP DEBUG: (#10,[::ffff:127.0.0.1]:36752) <- (200 Ok) 2025-07-08T11:54:46.121357Z :HTTP DEBUG: (#9,127.0.0.1:31407) -> (200 Ok) 2025-07-08T11:54:46.121361Z :HTTP DEBUG: (#9,127.0.0.1:31407) connection available for reuse 2025-07-08T11:54:46.121377Z :HTTP DEBUG: Connection [10:7524678276271080233:2055] available for destination http://127.0.0.1:31407 >> CompositeArrayAccessor::SlicesSimple [GOOD] >> CompositeArrayAccessor::FilterSimple [GOOD] >> TBsVDiskRepl3::ReplPerf [GOOD] |79.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/external_sources/ut/unittest >> ObjectStorageTest::WildcardsValidation [GOOD] |79.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/dq/provider/ut/ydb-library-yql-providers-dq-provider-ut |79.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tablet_flat/ut_util/ydb-core-tablet_flat-ut_util |79.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/actors/dnsresolver/ut/ydb-library-actors-dnsresolver-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRepl3::ReplPerf [GOOD] Test command err: 2025-07-08T11:54:27.855748Z :BS_SYNCER ERROR: VDISK[0:_:0:1:1]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-07-08T11:54:27.865479Z :BS_SYNCER ERROR: VDISK[0:_:0:1:1]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 4009768739494537823] 2025-07-08T11:54:28.886167Z :BS_SYNCER ERROR: VDISK[0:_:0:1:1]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-07-08T11:54:33.610650Z :BS_SYNCER ERROR: VDISK[0:_:0:3:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-07-08T11:54:33.639425Z :BS_SYNCER ERROR: VDISK[0:_:0:3:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 1180978799131043476] 2025-07-08T11:54:34.678755Z :BS_SYNCER ERROR: VDISK[0:_:0:3:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-07-08T11:54:43.307142Z :BS_SYNCER ERROR: VDISK[0:_:0:1:1]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-07-08T11:54:43.332045Z :BS_SYNCER ERROR: VDISK[0:_:0:1:1]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 5433607603796814924] 2025-07-08T11:54:44.346794Z :BS_SYNCER ERROR: VDISK[0:_:0:1:1]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/formats/arrow/accessor/composite/ut/unittest >> CompositeArrayAccessor::FilterSimple [GOOD] Test command err: [[null,"a1",null,"a3",null,null,null,"a7",null,null],[null,"b1","b2","b3",null],["c0",null,"c2","c3"]] |79.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/actors/core/harmonizer/ut/ydb-library-actors-core-harmonizer-ut |79.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut |79.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/result_convert/libpy3benchmarks-runner-result_convert.global.a |79.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut >> TBsVDiskManyPutGet::ManyMultiSinglePutGet [GOOD] >> TBsVDiskManyPutGet::ManyMultiPutGet |79.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut |79.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/sequenceshard/public/ut/unittest >> TBsLocalRecovery::WriteRestartRead [GOOD] >> 
TBsLocalRecovery::MultiPutWriteRestartRead >> TMemoryPoolTest::AppendString [GOOD] >> TestCommon::Empty [GOOD] |78.8%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/actors/cppcoro/corobenchmark/corobenchmark >> UtilString::ShrinkToFit [GOOD] >> TMemoryPoolTest::Transactions [GOOD] >> TMemoryPoolTest::LongRollback [GOOD] >> TMemoryPoolTest::AllocOneByte [GOOD] >> TMemoryPoolTest::TransactionsWithAlignment [GOOD] >> TestCommon::ParseCounterName [GOOD] >> TestCommon::CollectTaskRunnerStatisticsByStage [GOOD] >> TestCommon::CollectTaskRunnerStatisticsByTask [GOOD] >> TBsVDiskManyPutGet::ManyMultiPutGet [GOOD] >> TBsVDiskManyPutGet::ManyMultiPutGetWithLargeBatch |78.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/actors/wilson/ut/ydb-library-actors-wilson-ut >> ydb-library-benchmarks-runner::import_test [GOOD] |78.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/pgproxy/ut/ydb-core-pgproxy-ut |78.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tablet_flat/ut_large/ydb-core-tablet_flat-ut_large |78.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/dq/provider/ut/unittest >> TestCommon::CollectTaskRunnerStatisticsByTask [GOOD] >> TYardTest::TestStartingPointReboots [GOOD] >> ArrowTest::BatchBuilder [GOOD] >> TYardTest::TestRestartAtNonceJump >> ArrowTest::ArrowToYdbConverter [GOOD] >> ArrowTest::SortWithCompositeKey [GOOD] >> ArrowTest::MergingSortedInputStream [GOOD] >> ArrowTest::MergingSortedInputStreamReversed [GOOD] >> ArrowTest::MergingSortedInputStreamReplace [GOOD] >> ArrowTest::MaxVersionFilter [GOOD] >> ArrowTest::EqualKeysVersionFilter [GOOD] >> ColumnFilter::MergeFilters [GOOD] >> ColumnFilter::CombineFilters [GOOD] >> ColumnFilter::ApplyFilterToFilter [GOOD] >> ColumnFilter::FilterSlice [GOOD] >> ColumnFilter::FilterCheckSlice [GOOD] >> ColumnFilter::FilterSlice1 [GOOD] >> ColumnFilter::CutFilter1 [GOOD] >> ColumnFilter::CutFilter2 [GOOD] >> Dictionary::Simple |78.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_util/unittest >> TMemoryPoolTest::TransactionsWithAlignment [GOOD] >> TBsVDiskManyPutGet::ManyMultiPutGetWithLargeBatch [GOOD] |78.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/import_test >> ydb-library-benchmarks-runner::import_test [GOOD] |78.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/actors/cppcoro/ut/ydb-library-actors-cppcoro-ut |78.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut >> TBsLocalRecovery::MultiPutWriteRestartRead [GOOD] >> TBsLocalRecovery::MultiPutWriteRestartReadHuge >> TYardTest::TestRestartAtNonceJump [GOOD] >> TYardTest::TestRestartAtChunkEnd >> TTraceId::OpenTelemetryHeaderParser [GOOD] |78.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskManyPutGet::ManyMultiPutGetWithLargeBatch [GOOD] |78.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/union_copy_set/ut/ydb-library-union_copy_set-ut >> TPGTest::TestLogin |78.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/debug_tools/ut/ydb-core-debug_tools-ut >> TPGTest::TestLogin [GOOD] >> TBsLocalRecovery::MultiPutWriteRestartReadHuge [GOOD] >> TBsLocalRecovery::ChaoticWriteRestartHugeXXX >> TYardTest::TestRestartAtChunkEnd [GOOD] >> TYardTestRestore::TestRestore15 |77.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/actors/wilson/ut/unittest >> TTraceId::OpenTelemetryHeaderParser [GOOD] >> TYardTestRestore::TestRestore15 [GOOD] ------- [TS] 
{default-linux-x86_64, relwithdebinfo} ydb/core/pgproxy/ut/unittest >> TPGTest::TestLogin [GOOD] Test command err: 2025-07-08T11:54:49.913129Z :PGWIRE INFO: Listening on [::]:12982 2025-07-08T11:54:49.913472Z :PGWIRE DEBUG: (#13,[::1]:45858) incoming connection opened 2025-07-08T11:54:49.913509Z :PGWIRE DEBUG: (#13,[::1]:45858) -> [1] 'i' "Initial" Size(15) protocol(0x00000300) user=user 2025-07-08T11:54:49.913532Z :PGWIRE DEBUG: (#13,[::1]:45858) <- [1] 'R' "Auth" Size(4) OK >> Task::SimpleVoidCoroutine [GOOD] >> Task::SimpleIntCoroutine [GOOD] >> Task::SimpleVoidWhenDone [GOOD] >> Task::SimpleIntWhenDone [GOOD] >> Task::NestedAwait [GOOD] >> Task::PauseResume [GOOD] >> Task::PauseCancel [GOOD] >> Task::GroupWithTwoSubTasks [GOOD] >> Task::GroupWithTwoSubTasksDetached [GOOD] >> Task::GroupWithTwoSubTasksOneCancelled [GOOD] >> TaskActor::Basic >> ParseStats::ParseWithSources [GOOD] >> TaskActor::Basic [GOOD] >> ParseStats::ParseJustOutput [GOOD] >> ParseStats::ParseMultipleGraphsV1 [GOOD] >> ParseStats::ParseMultipleGraphsV2 [GOOD] >> UnionCopySet::Simple [GOOD] >> UnionCopySet::NotDisjoint [GOOD] >> UnionCopySet::StressDestroyUp >> UnionCopySet::MoveAdd [GOOD] >> UnionCopySet::ItemAddedToManySets [GOOD] >> UnionCopySet::NotDisjointOptimizeUnionPairWhileDestroying [GOOD] >> UnionCopySet::SetAddedToManySets [GOOD] >> UnionCopySet::NotDisjointOptimizeCopyPairWhileDestroying [GOOD] >> OperationLog::Size8 [GOOD] >> OperationLog::Size29 [GOOD] >> OperationLog::Size1 [GOOD] >> OperationLog::Size1000 >> TPDiskRaces::KillOwnerWhileDeletingChunk [GOOD] >> TPDiskRaces::KillOwnerWhileDeletingChunkWithInflight ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> TYardTestRestore::TestRestore15 [GOOD] Test command err: 2025-07-08T11:54:30.598851Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:30.600104Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 3700460402880134612 MagicNextLogChunkReference: 16907724938701776378 MagicLogChunk: 1380315967419719175 MagicDataChunk: 5009993902013636187 MagicSysLogChunk: 18231782278839157864 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975670534142 (2025-07-08T11:54:30.534142Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:30.613150Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:30.616469Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:30.616769Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | 
DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:30.617782Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:30.691118Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1890544 CutLogId# [0:0:0] ownerRound# 5 PDiskId# 1 2025-07-08T11:54:30.696060Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2025-07-08T11:54:30.775479Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:30.780125Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:299} Shutdown OwnerInfo# { PDisk system/log ChunkIds: {} Free ChunkIds: {} PDiskId# 1 2025-07-08T11:54:30.785180Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1 2025-07-08T11:54:30.872458Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:30.872477Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1 2025-07-08T11:54:30.873488Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 434505436550331362 MagicLogChunk: 11033948650018082190 MagicDataChunk: 9477176328616857737 MagicSysLogChunk: 18138598127205299957 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975670850705 (2025-07-08T11:54:30.850705Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:30.874927Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:30.879302Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:30.879330Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:30.880525Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:30.880653Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created 
ownerId# 7 vDiskId# [4:_:0:0:0] FirstNonceToKeep# 1301800 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-07-08T11:54:30.880872Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1301800 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-07-08T11:54:30.881029Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 5 vDiskId# [2:_:0:0:0] FirstNonceToKeep# 1301800 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-07-08T11:54:30.881287Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 4 vDiskId# [1:_:0:0:0] FirstNonceToKeep# 1301800 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-07-08T11:54:30.881433Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 6 vDiskId# [3:_:0:0:0] FirstNonceToKeep# 1301800 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-07-08T11:54:31.197208Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:31.197436Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 434505436550331362 MagicLogChunk: 11033948650018082190 MagicDataChunk: 9477176328616857737 MagicSysLogChunk: 18138598127205299957 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975670850705 (2025-07-08T11:54:30.850705Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:31.199512Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1148510 NonceLog# 1307826 NonceData# 2094744} LogHeadChunkIdx# 106 LogHeadChunkPreviousNonce# 1304348 Owner[3]# [0:4294967295:0:0:0] Owner[4]# [1:4294967295:0:0:0] Owner[5]# [2:4294967295:0:0:0] Owner[6]# [3:4294967295:0:0:0] Owner[7]# [4:4294967295:0:0:0]} PDiskId# 1 2025-07-08T11:54:31.210101Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 112 SectorIdx# 418 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 112 OffsetInChunk# 1712128} PDiskId# 1 2025-07-08T11:54:31.210131Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 112 OffsetInChunk# 1712128} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:31.210890Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:31.295581Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1817} Registered known VDisk VDisk# [1:4294967295:0:0:0] OwnerId# 4 OwnerRound# 2 PDiskId# 1 2025-07-08T11:54:31.295686Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1817} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2025-07-08T11:54:31.295755Z :BS_PDISK 
NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1817} Registered known VDisk VDisk# [3:4294967295:0:0:0] OwnerId# 6 OwnerRound# 2 PDiskId# 1 2025-07-08T11:54:31.295825Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1817} Registered known VDisk VDisk# [4:4294967295:0:0:0] OwnerId# 7 OwnerRound# 2 PDiskId# 1 2025-07-08T11:54:31.295892Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1817} Registered known VDisk VDisk# [2:4294967295:0:0:0] OwnerId# 5 OwnerRound# 2 PDiskId# 1 2025-07-08T11:54:31.343945Z :BS_PDISK WARN: {LR004@blobstorage_pdisk_logreader.cpp:838} PDiskId# 1 LogReader IsInitial# 0 Owner# 6 VDiskId# [3:_:0:0:0] ChunkIdx# 112 SectorIdx# 419 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags outside the LogEndSector LogEndChunkIdx# 112 LogEndSectorIdx# 419 PDiskId# 1 2025-07-08T11:54:31.343975Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 6 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 1011} PDiskId# 1 2025-07-08T11:54:31.344015Z :BS_PDISK WARN: {LR004@blobstorage_pdisk_logreader.cpp:838} PDiskId# 1 LogReader IsInitial# 0 Owner# 7 VDiskId# [4:_:0:0:0] ChunkIdx# 112 SectorIdx# 419 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags outside the LogEndSector LogEndChunkIdx# 112 LogEndSectorIdx# 419 PDiskId# 1 2025-07-08T11:54:31.344024Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 7 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 1011} PDiskId# 1 2025-07-08T11:54:31.344066Z :BS_PDISK WARN: {LR004@blobstorage_pdisk_logreader.cpp:838} PDiskId# 1 LogReader IsInitial# 0 Owner# 5 VDiskId# [2:_:0:0:0] ChunkIdx# 112 SectorIdx# 419 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags outside the LogEndSector LogEndChunkIdx# 112 LogEndSectorIdx# 419 PDiskId# 1 2025-07-08T11:54:31.344071Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 5 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 1011} PDiskId# 1 2025-07-08T11:54:31.344107Z :BS_PDISK WARN: {LR004@blobstorage_pdisk_logreader.cpp:838} PDiskId# 1 LogReader IsInitial# 0 Owner# 4 VDiskId# [1:_:0:0:0] ChunkIdx# 112 SectorIdx# 419 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags outside the LogEndSector LogEndChunkIdx# 112 LogEndSectorIdx# 419 PDiskId# 1 2025-07-08T11:54:31.344111Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 4 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags ... 
IsInitial# 0 Owner# 3 VDiskId# [0:_:0:0:0] ChunkIdx# 3 SectorIdx# 187 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags outside the LogEndSector LogEndChunkIdx# 3 LogEndSectorIdx# 187 PDiskId# 1 2025-07-08T11:54:49.696591Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 1202} PDiskId# 1 2025-07-08T11:54:49.734392Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:49.734566Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 17390677704576421016 MagicNextLogChunkReference: 14739682279773152023 MagicLogChunk: 16274797204653984165 MagicDataChunk: 3832233058362019837 MagicSysLogChunk: 13940921578565831716 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975689710286 (2025-07-08T11:54:49.710286Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:49.735635Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:49.736792Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:49.736818Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:49.737063Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:49.833787Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1394886 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-07-08T11:54:49.834128Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2025-07-08T11:54:49.938604Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:49.938948Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 17390677704576421016 MagicNextLogChunkReference: 14739682279773152023 
MagicLogChunk: 16274797204653984165 MagicDataChunk: 3832233058362019837 MagicSysLogChunk: 13940921578565831716 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975689710286 (2025-07-08T11:54:49.710286Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:49.940090Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1083897 NonceLog# 1394886 NonceData# 1737528} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-07-08T11:54:49.941005Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:711} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2025-07-08T11:54:49.942533Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 2 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 2 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:49.942558Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 2 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:49.942827Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:50.038231Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1817} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2025-07-08T11:54:50.039780Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 508} PDiskId# 1 2025-07-08T11:54:50.120891Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:50.121081Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 17390677704576421016 MagicNextLogChunkReference: 14739682279773152023 MagicLogChunk: 16274797204653984165 MagicDataChunk: 3832233058362019837 MagicSysLogChunk: 13940921578565831716 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975689710286 (2025-07-08T11:54:49.710286Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:50.122067Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read 
Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 2434938 NonceLog# 3335836 NonceData# 3172107} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-07-08T11:54:50.122719Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:711} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2025-07-08T11:54:50.123603Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:711} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 2 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 1395394 " with nonceJumpLogPageHeader2->PreviousNonce# "# 1395394 PDiskId# 1 2025-07-08T11:54:50.124899Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 3 SectorIdx# 186 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 3 OffsetInChunk# 761856} PDiskId# 1 2025-07-08T11:54:50.124922Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 3 OffsetInChunk# 761856} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:50.125182Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:50.220322Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1817} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2025-07-08T11:54:50.223629Z :BS_PDISK WARN: {LR004@blobstorage_pdisk_logreader.cpp:838} PDiskId# 1 LogReader IsInitial# 0 Owner# 3 VDiskId# [0:_:0:0:0] ChunkIdx# 3 SectorIdx# 187 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags outside the LogEndSector LogEndChunkIdx# 3 LogEndSectorIdx# 187 PDiskId# 1 2025-07-08T11:54:50.223655Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 1202} PDiskId# 1 2025-07-08T11:54:50.256527Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:50.256995Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 4856166154145189800 MagicNextLogChunkReference: 18240258303820798586 MagicLogChunk: 6863493887620791623 MagicDataChunk: 1615214924544288143 MagicSysLogChunk: 17046101307464540996 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975690236857 (2025-07-08T11:54:50.236857Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:50.258442Z :BS_PDISK NOTICE: 
{BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:50.259000Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:50.259036Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:50.259641Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:50.355691Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1983274 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 |77.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/common/kqp_tx.cpp |77.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/control_plane_storage/internal/ut/unittest >> ParseStats::ParseMultipleGraphsV2 [GOOD] >> OperationLog::Size1000 [GOOD] >> OperationLog::ConcurrentWrites |77.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/actors/cppcoro/ut/unittest >> TaskActor::Basic [GOOD] |77.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/log_backend/ut/ydb-core-log_backend-ut >> OperationLog::ConcurrentWrites [GOOD] >> Dictionary::Simple [GOOD] >> Dictionary::ComparePayloadAndFull >> Dictionary::ComparePayloadAndFull [GOOD] |76.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/common/kqp_tx.cpp |76.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/debug_tools/ut/unittest >> OperationLog::ConcurrentWrites [GOOD] |76.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/workload/benchmark_base/ut/ydb-library-workload-benchmark_base-ut >> Hash::ScalarBinaryHash [GOOD] >> Hash::ScalarCTypeHash [GOOD] >> Hash::ScalarCompositeHash [GOOD] >> ProgramStep::Round0 [GOOD] >> ProgramStep::Round1 [GOOD] >> ProgramStep::Filter [GOOD] >> ProgramStep::Add [GOOD] >> ProgramStep::Substract >> TBsVDiskOutOfSpace::WriteUntilYellowZone [GOOD] >> ProgramStep::Substract [GOOD] >> ProgramStep::Multiply [GOOD] >> ProgramStep::Divide [GOOD] >> ProgramStep::Gcd [GOOD] >> ProgramStep::Lcm [GOOD] >> ProgramStep::Mod [GOOD] >> ProgramStep::ModOrZero [GOOD] >> ProgramStep::Abs [GOOD] >> ProgramStep::Negate [GOOD] >> TBsVDiskRange::RangeGetFromEmptyDB >> ProgramStep::Compares [GOOD] >> ProgramStep::Logic0 [GOOD] >> ProgramStep::Logic1 [GOOD] >> ProgramStep::StartsWith [GOOD] >> ProgramStep::EndsWith [GOOD] >> ProgramStep::MatchSubstring [GOOD] >> ProgramStep::StartsWithIgnoreCase [GOOD] >> ProgramStep::EndsWithIgnoreCase [GOOD] >> ProgramStep::MatchSubstringIgnoreCase [GOOD] >> ProgramStep::ScalarTest [GOOD] >> ProgramStep::TestValueFromNull [GOOD] >> ProgramStep::MergeFilterSimple [GOOD] >> ProgramStep::Projection [GOOD] >> ProgramStep::MinMax [GOOD] >> ProgramStep::Sum [GOOD] >> ProgramStep::SumGroupBy [GOOD] >> 
ProgramStep::SumGroupByNotNull [GOOD] >> ProgramStep::MinMaxSomeGroupBy [GOOD] >> ProgramStep::MinMaxSomeGroupByNotNull [GOOD] >> SortableBatchPosition::FindPosition [GOOD] >> JsonEnvelopeTest::Simple [GOOD] >> JsonEnvelopeTest::NoReplace [GOOD] >> JsonEnvelopeTest::ArrayItem [GOOD] >> JsonEnvelopeTest::Escape [GOOD] >> JsonEnvelopeTest::BinaryData [GOOD] >> ydb-tests-functional-config::import_test [GOOD] |76.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/result_compare/libpy3benchmarks-runner-result_compare.global.a >> TBsVDiskRange::RangeGetFromEmptyDB [GOOD] >> TBsVDiskRange::Simple3PutRangeGetAllBackwardFresh |76.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/log_backend/ut/unittest >> JsonEnvelopeTest::BinaryData [GOOD] >> UnionCopySet::StressDestroyUp [GOOD] >> UnionCopySet::StressDestroyDown |76.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/config/import_test >> ydb-tests-functional-config::import_test [GOOD] >> DataGeneratorState::PortionProcessing [GOOD] >> DataGeneratorState::SaveLoad [GOOD] >> DynamicProxy::RaceCheck10 [GOOD] >> EventHolderPool::Overflow [GOOD] >> EventHolderPool::MemConsumptionSmall ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/formats/arrow/ut/unittest >> SortableBatchPosition::FindPosition [GOOD] Test command err: Process: 100000d;/100000; 10000d;/10000; NO_CODEC(poolsize=1024;keylen=1) 0.2021203448 0.2210911404 NO_CODEC(poolsize=1024;keylen=10) 0.1534132783 0.2482180533 NO_CODEC(poolsize=1024;keylen=16) 0.1104676508 0.2045372848 NO_CODEC(poolsize=1024;keylen=32) 0.06592569055 0.1591802296 NO_CODEC(poolsize=1024;keylen=64) 0.03972180035 0.1324717476 NO_CODEC(poolsize=128;keylen=1) 0.2016566193 0.2164784476 NO_CODEC(poolsize=128;keylen=10) 0.07304169975 0.08752922393 NO_CODEC(poolsize=128;keylen=16) 0.05151637558 0.06514358749 NO_CODEC(poolsize=128;keylen=32) 0.02919093319 0.04189888314 NO_CODEC(poolsize=128;keylen=64) 0.01605694811 0.02821124922 NO_CODEC(poolsize=16;keylen=1) 0.2010010074 0.2099570542 NO_CODEC(poolsize=16;keylen=10) 0.0719219365 0.07635285397 NO_CODEC(poolsize=16;keylen=16) 0.05039654131 0.05396013899 NO_CODEC(poolsize=16;keylen=32) 0.02807102527 0.03070808446 NO_CODEC(poolsize=16;keylen=64) 0.01493699686 0.01701612239 NO_CODEC(poolsize=1;keylen=1) 0.2008730831 0.2086845872 NO_CODEC(poolsize=1;keylen=10) 0.07177339648 0.07487027428 NO_CODEC(poolsize=1;keylen=16) 0.0502445638 0.05244238527 NO_CODEC(poolsize=1;keylen=32) 0.02791992658 0.0291982148 NO_CODEC(poolsize=1;keylen=64) 0.01478641518 0.01551089526 NO_CODEC(poolsize=512;keylen=1) 0.2021203448 0.2210911404 NO_CODEC(poolsize=512;keylen=10) 0.1482943606 0.1971260763 NO_CODEC(poolsize=512;keylen=16) 0.1053484084 0.1534129488 NO_CODEC(poolsize=512;keylen=32) 0.0608061115 0.1080222928 NO_CODEC(poolsize=512;keylen=64) 0.03460202321 0.08129402495 NO_CODEC(poolsize=64;keylen=1) 0.2013687897 0.2136153969 NO_CODEC(poolsize=64;keylen=10) 0.07240183504 0.08114272681 NO_CODEC(poolsize=64;keylen=16) 0.05087647028 0.05875304549 NO_CODEC(poolsize=64;keylen=32) 0.02855098581 0.03550414104 NO_CODEC(poolsize=64;keylen=64) 0.01541697597 0.02181403389 lz4(poolsize=1024;keylen=1) 0.006629768257 0.05541610349 lz4(poolsize=1024;keylen=10) 0.04233951498 0.3344832994 lz4(poolsize=1024;keylen=16) 0.05657489465 0.404264214 lz4(poolsize=1024;keylen=32) 0.09037137941 0.5318074361 lz4(poolsize=1024;keylen=64) 0.01074936154 0.1063492063 lz4(poolsize=128;keylen=1) 0.003831111821 0.02881389382 lz4(poolsize=128;keylen=10) 0.00718182175 0.06087121933 
lz4(poolsize=128;keylen=16) 0.008735936466 0.07523964551 lz4(poolsize=128;keylen=32) 0.01375268158 0.117441454 lz4(poolsize=128;keylen=64) 0.02262360212 0.1850289108 lz4(poolsize=16;keylen=1) 0.00273442178 0.01820340324 lz4(poolsize=16;keylen=10) 0.003078137332 0.02169239789 lz4(poolsize=16;keylen=16) 0.003266503667 0.02356577168 lz4(poolsize=16;keylen=32) 0.003742685614 0.02844311377 lz4(poolsize=16;keylen=64) 0.004937163375 0.03979647465 lz4(poolsize=1;keylen=1) 0.00251497006 0.01603325416 lz4(poolsize=1;keylen=10) 0.002531395234 0.01628089447 lz4(poolsize=1;keylen=16) 0.002515970516 0.01617933723 lz4(poolsize=1;keylen=32) 0.00251450677 0.01630226314 lz4(poolsize=1;keylen=64) 0.002511620933 0.01653353149 lz4(poolsize=512;keylen=1) 0.005362411291 0.04359726295 lz4(poolsize=512;keylen=10) 0.02347472854 0.1933066062 lz4(poolsize=512;keylen=16) 0.03056053336 0.2426853056 lz4(poolsize=512;keylen=32) 0.04856356058 0.3467897492 lz4(poolsize=512;keylen=64) 0.04102771881 0.3228658321 lz4(poolsize=64;keylen=1) 0.003312844256 0.02372010279 lz4(poolsize=64;keylen=10) 0.004839661617 0.03863241259 lz4(poolsize=64;keylen=16) 0.005715507689 0.04687204687 lz4(poolsize=64;keylen=32) 0.007821957352 0.06669044223 lz4(poolsize=64;keylen=64) 0.01258912656 0.1073551894 zstd(poolsize=1024;keylen=1) 0.007215007215 0.0754840827 zstd(poolsize=1024;keylen=10) 0.04436824057 0.3776978417 zstd(poolsize=1024;keylen=16) 0.06417364307 0.4694540288 zstd(poolsize=1024;keylen=32) 0.1088704328 0.6098141264 zstd(poolsize=1024;keylen=64) 0.1881404128 0.7447345433 zstd(poolsize=128;keylen=1) 0.0037131439 0.04002713704 zstd(poolsize=128;keylen=10) 0.007337810029 0.07809798271 zstd(poolsize=128;keylen=16) 0.01002666048 0.1029455519 zstd(poolsize=128;keylen=32) 0.0164095737 0.1578947368 zstd(poolsize=128;keylen=64) 0.02945264987 0.2517949988 zstd(poolsize=16;keylen=1) 0.002581457579 0.02794819359 zstd(poolsize=16;keylen=10) 0.002771136709 0.03048416019 zstd(poolsize=16;keylen=16) 0.003293212485 0.03570300158 zstd(poolsize=16;keylen=32) 0.004068848428 0.0434375 zstd(poolsize=16;keylen=64) 0.005660601031 0.05875115349 zstd(poolsize=1;keylen=1) 0.002424204263 0.02626193724 zstd(poolsize=1;keylen=10) 0.002120141343 0.0234375 zstd(poolsize=1;keylen=16) 0.002304281881 0.02519132653 zstd(poolsize=1;keylen=32) 0.002374739805 0.02573879886 zstd(poolsize=1;keylen=64) 0.002526753864 0.02699269609 zstd(poolsize=512;keylen=1) 0.005499167269 0.05848930481 zstd(poolsize=512;keylen=10) 0.02331932211 0.2237078941 zstd(poolsize=512;keylen=16) 0.03368486881 0.2936507937 zstd(poolsize=512;keylen=32) 0.05794194663 0.4212765957 zstd(poolsize=512;keylen=64) 0.1039097138 0.5749553837 zstd(poolsize=64;keylen=1) 0.003147524472 0.03401360544 zstd(poolsize=64;keylen=10) 0.004774564592 0.05176470588 zstd(poolsize=64;keylen=16) 0.006192580533 0.06557881773 zstd(poolsize=64;keylen=32) 0.009437809496 0.09619952494 zstd(poolsize=64;keylen=64) 0.01593496889 0.1514644351 NO_CODEC --1000 ----1 ------1 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5168;columns=1; --------5168 / 5296 = 2.416918429% ------10 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=14168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=14168;columns=1; --------14168 / 14296 = 0.8953553442% ------16 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=20168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=20168;columns=1; --------20168 / 20296 = 0.6306661411% ------32 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=36168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=36168;columns=1; --------36168 / 36296 = 0.35265594% ------64 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=68168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=68168;columns=1; --------68168 / 68296 = 0.1874194682% ----16 ------1 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5168;columns=1; --------5168 / 5296 = 2.416918429% ------10 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=14168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=14168;columns=1; --------14168 / 14296 = 0.8953553442% ------16 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=20168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=20168;columns=1; --------20168 / 20296 = 0.6306661411% ------32 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=36168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=36168;columns=1; --------36168 / 36296 = 0.35265594% ------64 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=68168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=68168;columns=1; --------68168 / 68296 = 0.1874194682% ----64 ------1 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5168;columns=1; --------5168 / 5296 = 2.416918429% ------10 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=14168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=14168;columns=1; --------14168 / 14296 = 0.8953553442% ------16 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=20168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=20168;columns=1; --------20168 / 20296 = 0.6306661411% ------32 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=36168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=36168;columns=1; --------36168 / 36296 = 0.35265594% ------64 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=68168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=68168;columns=1; --------68168 / 68296 = 0.1874194682% ----128 ------1 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5168;columns=1; --------5168 / 5296 = 2.416918429% ------10 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=14168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=14168;columns=1; --------14168 / 14296 = 0.8953553442% ------16 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=20168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=20168;columns=1; - ... (9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"y\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(27):{\"i\":\"1,3,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N2 -> N5[label="1"]; N0 -> N5[label="2"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"1,2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(18):{\"i\":\"1,2\",\"o\":\"3,4\",\"t\":\"Aggregation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"1\",\"p\":{\"address\":{\"name\":\"x\",\"id\":1}},\"o\":\"1\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"y\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(27):{\"i\":\"1,3,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N2 -> N5[label="1"]; N0 -> N5[label="2"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"1,2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=216;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=216;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=216;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(18):{\"i\":\"1,2\",\"o\":\"3,4\",\"t\":\"Aggregation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"1\",\"p\":{\"address\":{\"name\":\"x\",\"id\":1}},\"o\":\"1\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"y\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(27):{\"i\":\"1,3,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N2 -> N5[label="1"]; N0 -> N5[label="2"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"1,2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=216;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=216;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=216;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(18):{\"i\":\"1,2\",\"o\":\"3,4\",\"t\":\"Aggregation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"1\",\"p\":{\"address\":{\"name\":\"x\",\"id\":1}},\"o\":\"1\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"y\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(27):{\"i\":\"1,3,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N2 -> N5[label="1"]; N0 -> N5[label="2"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"1,2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=216;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=216;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=216;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(18):{\"i\":\"1,2\",\"o\":\"3,4\",\"t\":\"Aggregation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"1\",\"p\":{\"address\":{\"name\":\"x\",\"id\":1}},\"o\":\"1\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"y\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(27):{\"i\":\"1,3,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N2 -> N5[label="1"]; N0 -> N5[label="2"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"1,2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(18):{\"i\":\"1,2\",\"o\":\"3,4\",\"t\":\"Aggregation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"1\",\"p\":{\"address\":{\"name\":\"x\",\"id\":1}},\"o\":\"1\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"y\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(27):{\"i\":\"1,3,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N2 -> N5[label="1"]; N0 -> N5[label="2"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"1,2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; 
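The `graph_constructed` entries embed a complete Graphviz DOT description of the executed program: N7 ReserveMemory feeds N6 FetchOriginalData, the two AssembleOriginalData nodes rebuild columns `x` and `y`, N0 Aggregation consumes them, and N5 Projection emits columns 1, 3 and 4; the red chain N7->N6->N2->N4->N0->N5 is the execution order. A small, illustrative sketch (file names are hypothetical, not part of the test tooling) that pulls these payloads out of a saved log and writes them as `.dot` files renderable with `dot -Tsvg`:

```python
# Extract "graph_constructed=digraph program {...};" payloads from a saved log
# and dump each one as a standalone .dot file.
import re

with open("ya_test.log", encoding="utf-8") as f:   # hypothetical path to the saved CI log
    text = f.read()

# Each payload sits on one log entry and ends at the first "};" after "digraph program {".
for i, dot in enumerate(re.findall(r"graph_constructed=(digraph program \{.*?\});", text)):
    with open(f"program_{i}.dot", "w", encoding="utf-8") as out:
        # Undo the escaping the logger applied to quotes and newlines inside labels.
        out.write(dot.replace('\\"', '"').replace("\\n", "\n"))
```

Rendering one of these graphs also makes the `REMOVE:2` annotation on the Aggregation node easy to spot, which suggests column 2 (`y`) is dropped as soon as the aggregate has consumed it.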
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(18):{\"i\":\"1,2\",\"o\":\"3,4\",\"t\":\"Aggregation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"1\",\"p\":{\"address\":{\"name\":\"x\",\"id\":1}},\"o\":\"1\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"y\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(27):{\"i\":\"1,3,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N2 -> N5[label="1"]; N0 -> N5[label="2"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"x\",\"id\":1},{\"name\":\"y\",\"id\":2}]},\"o\":\"1,2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=192;columns=1; |76.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump |76.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/workload/benchmark_base/ut/unittest >> DataGeneratorState::SaveLoad [GOOD] >> TBsVDiskRange::Simple3PutRangeGetAllBackwardFresh [GOOD] >> TBsVDiskRange::Simple3PutRangeGetAllBackwardCompaction |75.8%| [PR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/tool/{c79bf977cdb0ffe390211f5e3d.yasm ... 
ro_363ad6a7a0ee9cfe4ed6517f8f.rodata} >> EventHolderPool::MemConsumptionSmall [GOOD] >> EventHolderPool::MemConsumptionLarge >> EventHolderPool::MemConsumptionLarge [GOOD] >> Interconnect::SessionContinuation |75.6%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut >> UnionCopySet::StressDestroyDown [GOOD] >> UnionCopySet::MovingSetsAndValues [GOOD] >> UnionCopySet::DeepDestroyUp >> TBsVDiskRange::Simple3PutRangeGetAllBackwardCompaction [GOOD] >> UnionCopySet::DeepDestroyUp [GOOD] >> UnionCopySet::DeepDestroyDown |75.1%| [AS] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/tool/c79bf977cdb0ffe390211f5e3d.yasm |75.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/defrag/ut/ydb-core-blobstorage-vdisk-defrag-ut |75.1%| [AS] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/c79bf977cdb0ffe390211f5e3d.yasm |75.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskRange::Simple3PutRangeGetAllBackwardCompaction [GOOD] >> UnionCopySet::DeepDestroyDown [GOOD] >> UnionCopySet::DebugString [GOOD] |74.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/result_convert/result_convert |74.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part0/ydb-tests-fq-yt-kqp_yt_file-part0 |74.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/union_copy_set/ut/unittest >> UnionCopySet::DebugString [GOOD] |74.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/clickbench/ydb-tests-functional-clickbench |74.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/dq/opt/ut/ydb-library-yql-dq-opt-ut >> TVDiskDefrag::HugeHeapDefragmentationRequired [GOOD] >> TPDiskRaces::Decommit [GOOD] >> TPDiskRaces::DecommitWithInflight >> test.py::test[solomon-InvalidProject-] [GOOD] >> test.py::test[solomon-LabelColumns-default.txt] >> Backpressure::MonteCarlo [GOOD] |74.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.a |74.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/defrag/ut/unittest >> TVDiskDefrag::HugeHeapDefragmentationRequired [GOOD] |74.5%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.a >> DQCBO::RelCollector [GOOD] >> HypergraphBuild::ComplexTransitiveClosure [GOOD] >> DQCBO::JoinSearch2Rels [GOOD] |74.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/result_compare/result_compare >> DQCBO::JoinSearch3Rels [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/backpressure/ut_client/unittest >> Backpressure::MonteCarlo [GOOD] >> HypergraphBuild::AnyJoinWithTransitiveClosure [GOOD] Test command err: Clock# 1970-01-01T00:00:00.000000Z elapsed# 0.000013s EventsProcessed# 0 clients.size# 0 Clock# 1970-01-01T00:00:11.184262Z elapsed# 0.000045s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:00:22.649357Z elapsed# 0.000051s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:00:33.188294Z elapsed# 0.000057s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:00:48.392193Z elapsed# 0.000062s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:01:06.096694Z elapsed# 0.000069s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:01:18.807158Z elapsed# 0.000074s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:01:31.034708Z elapsed# 0.000080s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:01:44.381972Z elapsed# 0.000086s EventsProcessed# 2 
clients.size# 0 Clock# 1970-01-01T00:02:00.447295Z elapsed# 0.000092s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:02:12.907074Z elapsed# 0.000098s EventsProcessed# 2 clients.size# 0 Clock# 1970-01-01T00:02:32.763402Z elapsed# 0.001804s EventsProcessed# 2210 clients.size# 1 Clock# 1970-01-01T00:02:51.867961Z elapsed# 0.003473s EventsProcessed# 4483 clients.size# 1 Clock# 1970-01-01T00:03:07.670732Z elapsed# 0.005025s EventsProcessed# 6298 clients.size# 1 Clock# 1970-01-01T00:03:19.822025Z elapsed# 0.006013s EventsProcessed# 7725 clients.size# 1 Clock# 1970-01-01T00:03:29.949634Z elapsed# 0.007062s EventsProcessed# 8939 clients.size# 1 Clock# 1970-01-01T00:03:45.837486Z elapsed# 0.009116s EventsProcessed# 10831 clients.size# 1 Clock# 1970-01-01T00:03:56.330876Z elapsed# 0.010205s EventsProcessed# 12011 clients.size# 1 Clock# 1970-01-01T00:04:08.706276Z elapsed# 0.011723s EventsProcessed# 13479 clients.size# 1 Clock# 1970-01-01T00:04:24.906103Z elapsed# 0.011791s EventsProcessed# 13481 clients.size# 0 Clock# 1970-01-01T00:04:41.963520Z elapsed# 0.011801s EventsProcessed# 13481 clients.size# 0 Clock# 1970-01-01T00:05:01.852352Z elapsed# 0.011810s EventsProcessed# 13481 clients.size# 0 Clock# 1970-01-01T00:05:14.219659Z elapsed# 0.011818s EventsProcessed# 13481 clients.size# 0 Clock# 1970-01-01T00:05:27.809860Z elapsed# 0.011826s EventsProcessed# 13481 clients.size# 0 Clock# 1970-01-01T00:05:46.042953Z elapsed# 0.011835s EventsProcessed# 13481 clients.size# 0 Clock# 1970-01-01T00:06:03.294976Z elapsed# 0.011844s EventsProcessed# 13481 clients.size# 0 Clock# 1970-01-01T00:06:20.100963Z elapsed# 0.011852s EventsProcessed# 13481 clients.size# 0 Clock# 1970-01-01T00:06:32.799063Z elapsed# 0.011860s EventsProcessed# 13481 clients.size# 0 Clock# 1970-01-01T00:06:45.504362Z elapsed# 0.011869s EventsProcessed# 13481 clients.size# 0 Clock# 1970-01-01T00:06:56.775544Z elapsed# 0.011877s EventsProcessed# 13481 clients.size# 0 Clock# 1970-01-01T00:07:13.964279Z elapsed# 0.011885s EventsProcessed# 13481 clients.size# 0 Clock# 1970-01-01T00:07:30.481563Z elapsed# 0.011893s EventsProcessed# 13481 clients.size# 0 Clock# 1970-01-01T00:07:48.441515Z elapsed# 0.013908s EventsProcessed# 15722 clients.size# 1 Clock# 1970-01-01T00:08:03.897202Z elapsed# 0.015114s EventsProcessed# 17488 clients.size# 1 Clock# 1970-01-01T00:08:14.552955Z elapsed# 0.016342s EventsProcessed# 18723 clients.size# 1 Clock# 1970-01-01T00:08:34.277381Z elapsed# 0.018551s EventsProcessed# 21005 clients.size# 1 Clock# 1970-01-01T00:08:46.088611Z elapsed# 0.020045s EventsProcessed# 22487 clients.size# 1 Clock# 1970-01-01T00:08:59.555049Z elapsed# 0.021224s EventsProcessed# 23984 clients.size# 1 Clock# 1970-01-01T00:09:09.612484Z elapsed# 0.022356s EventsProcessed# 25171 clients.size# 1 Clock# 1970-01-01T00:09:24.790788Z elapsed# 0.024256s EventsProcessed# 26978 clients.size# 1 Clock# 1970-01-01T00:09:34.979139Z elapsed# 0.025546s EventsProcessed# 28193 clients.size# 1 Clock# 1970-01-01T00:09:47.287250Z elapsed# 0.027043s EventsProcessed# 29640 clients.size# 1 Clock# 1970-01-01T00:09:59.121718Z elapsed# 0.028758s EventsProcessed# 31080 clients.size# 1 Clock# 1970-01-01T00:10:14.442020Z elapsed# 0.030889s EventsProcessed# 32873 clients.size# 1 Clock# 1970-01-01T00:10:28.945909Z elapsed# 0.032399s EventsProcessed# 34582 clients.size# 1 Clock# 1970-01-01T00:10:43.650416Z elapsed# 0.034049s EventsProcessed# 36390 clients.size# 1 Clock# 1970-01-01T00:11:03.214747Z elapsed# 0.036306s EventsProcessed# 38635 clients.size# 1 Clock# 
1970-01-01T00:11:17.893091Z elapsed# 0.039167s EventsProcessed# 42078 clients.size# 2 Clock# 1970-01-01T00:11:35.548287Z elapsed# 0.042643s EventsProcessed# 46370 clients.size# 2 Clock# 1970-01-01T00:11:51.343745Z elapsed# 0.046018s EventsProcessed# 50168 clients.size# 2 Clock# 1970-01-01T00:12:07.541936Z elapsed# 0.049718s EventsProcessed# 54100 clients.size# 2 Clock# 1970-01-01T00:12:24.674920Z elapsed# 0.053443s EventsProcessed# 58139 clients.size# 2 Clock# 1970-01-01T00:12:41.496566Z elapsed# 0.056433s EventsProcessed# 61960 clients.size# 2 Clock# 1970-01-01T00:12:55.633552Z elapsed# 0.061522s EventsProcessed# 67069 clients.size# 3 Clock# 1970-01-01T00:13:12.921692Z elapsed# 0.067282s EventsProcessed# 73211 clients.size# 3 Clock# 1970-01-01T00:13:29.561375Z elapsed# 0.076957s EventsProcessed# 80947 clients.size# 4 Clock# 1970-01-01T00:13:42.779028Z elapsed# 0.082756s EventsProcessed# 87393 clients.size# 4 Clock# 1970-01-01T00:14:01.644129Z elapsed# 0.091126s EventsProcessed# 96408 clients.size# 4 Clock# 1970-01-01T00:14:19.186019Z elapsed# 0.100393s EventsProcessed# 104767 clients.size# 4 Clock# 1970-01-01T00:14:32.774764Z elapsed# 0.107427s EventsProcessed# 111088 clients.size# 4 Clock# 1970-01-01T00:14:51.075104Z elapsed# 0.116903s EventsProcessed# 119695 clients.size# 4 Clock# 1970-01-01T00:15:01.592045Z elapsed# 0.122232s EventsProcessed# 124827 clients.size# 4 Clock# 1970-01-01T00:15:18.035279Z elapsed# 0.129783s EventsProcessed# 132664 clients.size# 4 Clock# 1970-01-01T00:15:28.402357Z elapsed# 0.134324s EventsProcessed# 137472 clients.size# 4 Clock# 1970-01-01T00:15:41.999934Z elapsed# 0.141051s EventsProcessed# 143948 clients.size# 4 Clock# 1970-01-01T00:16:00.178841Z elapsed# 0.150549s EventsProcessed# 152651 clients.size# 4 Clock# 1970-01-01T00:16:15.664600Z elapsed# 0.158875s EventsProcessed# 160066 clients.size# 4 Clock# 1970-01-01T00:16:29.305865Z elapsed# 0.162728s EventsProcessed# 164960 clients.size# 3 Clock# 1970-01-01T00:16:42.134160Z elapsed# 0.166639s EventsProcessed# 169542 clients.size# 3 Clock# 1970-01-01T00:17:01.748557Z elapsed# 0.172211s EventsProcessed# 176433 clients.size# 3 Clock# 1970-01-01T00:17:14.401836Z elapsed# 0.176190s EventsProcessed# 180890 clients.size# 3 Clock# 1970-01-01T00:17:25.381118Z elapsed# 0.180008s EventsProcessed# 184843 clients.size# 3 Clock# 1970-01-01T00:17:38.790011Z elapsed# 0.183903s EventsProcessed# 189720 clients.size# 3 Clock# 1970-01-01T00:17:52.079223Z elapsed# 0.187960s EventsProcessed# 194318 clients.size# 3 Clock# 1970-01-01T00:18:09.650840Z elapsed# 0.193713s EventsProcessed# 200637 clients.size# 3 Clock# 1970-01-01T00:18:21.480056Z elapsed# 0.197032s EventsProcessed# 204894 clients.size# 3 Clock# 1970-01-01T00:18:33.207846Z elapsed# 0.200911s EventsProcessed# 209171 clients.size# 3 Clock# 1970-01-01T00:18:49.191850Z elapsed# 0.205893s EventsProcessed# 214823 clients.size# 3 Clock# 1970-01-01T00:19:01.354224Z elapsed# 0.210428s EventsProcessed# 219169 clients.size# 3 Clock# 1970-01-01T00:19:17.467614Z elapsed# 0.216984s EventsProcessed# 224847 clients.size# 3 Clock# 1970-01-01T00:19:35.272844Z elapsed# 0.224133s EventsProcessed# 231196 clients.size# 3 Clock# 1970-01-01T00:19:50.273159Z elapsed# 0.228432s EventsProcessed# 236436 clients.size# 3 Clock# 1970-01-01T00:20:07.721684Z elapsed# 0.232525s EventsProcessed# 240462 clients.size# 2 Clock# 1970-01-01T00:20:23.654228Z elapsed# 0.234180s EventsProcessed# 242391 clients.size# 1 Clock# 1970-01-01T00:20:35.523873Z elapsed# 0.235385s EventsProcessed# 243760 clients.size# 1 
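The Backpressure::MonteCarlo trace interleaves the simulated clock, the wall-clock `elapsed` time, the cumulative `EventsProcessed` counter and the current client count, so throughput can be read off any two adjacent samples. A small worked example (derived from the samples above, not printed by the test itself), using the pair around 00:07:30 and 00:07:48 where a client connects:

```python
# Event-processing rate between two adjacent trace samples from the log above.
e0, t0 = 13481, 0.011893       # EventsProcessed / elapsed at 00:07:30, clients.size# 0
e1, t1 = 15722, 0.013908       # next sample at 00:07:48, clients.size# 1
rate = (e1 - e0) / (t1 - t0)   # roughly 1.1 million simulated events per wall-clock second
print(f"{rate:.0f} events/s")
```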
Clock# 1970-01-01T00:20:50.209287Z elapsed# 0.236792s EventsProcessed# 245435 clients.size# 1 Clock# 1970-01-01T00:21:00.422718Z elapsed# 0.237782s EventsProcessed# 246614 clients.size# 1 Clock# 1970-01-01T00:21:19.965790Z elapsed# 0.239605s EventsProcessed# 248965 clients.size# 1 Clock# 1970-01-01T00:21:35.996738Z elapsed# 0.241060s EventsProcessed# 250892 clients.size# 1 Clock# 1970-01-01T00:21:53.744854Z elapsed# 0.242786s EventsProcessed# 253004 clients.size# 1 Clock# 1970-01-01T00:22:08.990604Z elapsed# 0.244234s EventsProcessed# 254930 clients.size# 1 Clock# 1970-01-01T00:22:24.946955Z elapsed# 0.245655s EventsProcessed# 256794 clients.size# 1 Clock# 1970-01-01T00:22:42.526534Z elapsed# 0.249349s EventsProcessed# 260965 clients.size# 2 Clock# 1970-01-01T00:22:58.782440Z elapsed# 0.253264s EventsProcessed# 264680 clients.size# 2 Clock# 1970-01-01T00:23:11.313211Z elapsed# 0.256820s EventsProcessed# 267680 clients.size# 2 Clock# 1970-01-01T00:23:30.495952Z elapsed# 0.262192s EventsProcessed# 272178 clients.size# 2 Clock# 1970-01-01T00:23:41.979278Z elapsed# 0.265341s EventsProcessed# 274905 clients.size# 2 Clock# 1970-01-01T00:23:53.942327Z elapsed# 0.267519s EventsProcessed# 277798 clients.size# 2 Clock# 1970-01-01T00:24:04.652912Z elapsed# 0.270028s EventsProcessed# 280308 clients.size# 2 Clock# 1970-01-01T00:24:21.341076Z elapsed# 0.273707s EventsProcessed# 284083 clients.size# 2 Clock# 1970-01-01T00:24:34.117427Z elapsed# 0.275267s EventsProcessed# 285576 clients.size# 1 Clock# 1970-01-01T00:24:51.676948Z elapsed# 0.279140s EventsProcessed# 289704 clients.size# 2 Clock# 1970-01-01T00:25:06.982813Z elapsed# 0.281922s EventsProcessed# 293326 clients.size# 2 Clock# 1970-01-01T00:25:24.030363Z elapsed# 0.285695s EventsProcessed# 297360 clients.size# 2 Clock# 1970-01-01T00:25:41.854955Z elapsed# 0.289492s EventsProcessed# 301650 clients.size# 2 Clock# 1970-01-01T00:25:56.594714Z elapsed# 0.292409s EventsProcessed# 305257 clients.size# 2 Clock# 1970-01-01T00:26:10.199579Z elapsed# 0.293875s EventsProcessed# 307002 clients.size# 1 Clock# 1970-01-01T00:26:22.265607Z elapsed# 0.293901s EventsProcessed# 307004 clients.size# 0 Clock# 1970-01-01T00:26:37.620335Z elapsed# 0.293908s EventsProcessed# 307004 clients.size# 0 Clock# 1970-01-01T00:26:55.304921Z elapsed# 0.293913s EventsProcessed# 307004 clients.size# 0 Clock# 1970-01-01T00:27:12.539931Z elapsed# 0.295804s EventsProcessed# 309084 clients.size# 1 Clock# 1970-01-01T00:27:29.096399Z elapsed# 0.298026s EventsProcessed# 311060 clients.size# 1 Clock# 1970-01-01T00:27:47.995756Z elapsed# 0.300248s EventsProcessed# 313326 clients.size# 1 Clock# 1970-01-01T00:28:05.776623Z elapsed# 0.302010s EventsProcessed# 315361 clients.size# 1 Clock# 1970-01-01T00:28:24.890389Z elapsed# 0.304379s EventsProcessed# 317746 clients.size# 1 Clock# 1970-01-01T00:28:36.072633Z elapsed# 0.307184s EventsProcessed# 320475 clients.size# 2 Clock# 1970-01-01T00:28:48.880213Z elapsed# 0.310119s EventsProcessed# 323335 clients.size# 2 Clock# 1970-01-01T00:28:58.889925Z elapsed# 0.312276s EventsProcessed# 325673 clients.size# 2 Clock# 1970-01-01T00:29:15.657515Z elapsed# 0.315604s EventsProcessed# 329788 clients.size# 2 Clock# 1970-01-01T00:29:34.359302Z elapsed# 0.321800s EventsProcessed# 336544 clients.size# 3 Clock# 1970-01-01T00:29:49.402420Z elapsed# 0.326876s EventsProcessed# 341930 clients.size# 3 Clock# 1970-01-01T00:30:01.643446Z elapsed# 0.330284s EventsProcessed# 346251 clients.size# 3 Clock# 1970-01-01T00:30:20.339937Z elapsed# 0.335617s EventsProcessed# 
352707 clients.size# 3 Clock# 1970-01-01T00:30:34.426303Z elapsed# 0.340503s EventsProcessed# 357713 clients.size# 3 Clock# 1970-01-01T00:30:51.681066Z elapsed# 0.345663s EventsProcessed# 363812 clients.size# 3 Clock# 1970-01-01T00:31:08.779349Z elapsed# 0.351268s EventsProcessed# 370018 clients.size# 3 Clock# 1970-01-01T00:31:23.379757Z elapsed# 0.356755s EventsProcessed# 375208 clients.size# 3 Clock# 1970-01-01T00:31:35.347936Z elapsed# 0.362162s EventsProcessed# 379529 clients.size# 3 Clock# 1970-01-01T00:31:52.356348Z elapsed# 0.369291s EventsProcessed# 3857 ... 63790Z elapsed# 17.099217s EventsProcessed# 11861741 clients.size# 4 Clock# 1970-01-01T05:28:12.622915Z elapsed# 17.106055s EventsProcessed# 11868628 clients.size# 3 Clock# 1970-01-01T05:28:32.188050Z elapsed# 17.113304s EventsProcessed# 11875723 clients.size# 3 Clock# 1970-01-01T05:28:46.899547Z elapsed# 17.119530s EventsProcessed# 11880950 clients.size# 3 Clock# 1970-01-01T05:29:00.155502Z elapsed# 17.124842s EventsProcessed# 11885629 clients.size# 3 Clock# 1970-01-01T05:29:17.038234Z elapsed# 17.131444s EventsProcessed# 11891533 clients.size# 3 Clock# 1970-01-01T05:29:33.302065Z elapsed# 17.139785s EventsProcessed# 11897238 clients.size# 3 Clock# 1970-01-01T05:29:48.156843Z elapsed# 17.145864s EventsProcessed# 11902638 clients.size# 3 Clock# 1970-01-01T05:30:01.963278Z elapsed# 17.150769s EventsProcessed# 11907476 clients.size# 3 Clock# 1970-01-01T05:30:12.606891Z elapsed# 17.154249s EventsProcessed# 11911176 clients.size# 3 Clock# 1970-01-01T05:30:25.583293Z elapsed# 17.158751s EventsProcessed# 11915751 clients.size# 3 Clock# 1970-01-01T05:30:40.899292Z elapsed# 17.164158s EventsProcessed# 11921160 clients.size# 3 Clock# 1970-01-01T05:30:51.432528Z elapsed# 17.168261s EventsProcessed# 11924940 clients.size# 3 Clock# 1970-01-01T05:31:10.517179Z elapsed# 17.176343s EventsProcessed# 11931726 clients.size# 3 Clock# 1970-01-01T05:31:27.985877Z elapsed# 17.184073s EventsProcessed# 11937965 clients.size# 3 Clock# 1970-01-01T05:31:43.316992Z elapsed# 17.189383s EventsProcessed# 11943421 clients.size# 3 Clock# 1970-01-01T05:31:58.980662Z elapsed# 17.193258s EventsProcessed# 11947193 clients.size# 2 Clock# 1970-01-01T05:32:16.761291Z elapsed# 17.197851s EventsProcessed# 11951388 clients.size# 2 Clock# 1970-01-01T05:32:29.743592Z elapsed# 17.201598s EventsProcessed# 11954467 clients.size# 2 Clock# 1970-01-01T05:32:42.519695Z elapsed# 17.204776s EventsProcessed# 11957446 clients.size# 2 Clock# 1970-01-01T05:32:58.964525Z elapsed# 17.210139s EventsProcessed# 11961301 clients.size# 2 Clock# 1970-01-01T05:33:16.242230Z elapsed# 17.215821s EventsProcessed# 11965312 clients.size# 2 Clock# 1970-01-01T05:33:30.763435Z elapsed# 17.220077s EventsProcessed# 11968808 clients.size# 2 Clock# 1970-01-01T05:33:48.190033Z elapsed# 17.224517s EventsProcessed# 11973042 clients.size# 2 Clock# 1970-01-01T05:34:07.961726Z elapsed# 17.229403s EventsProcessed# 11977626 clients.size# 2 Clock# 1970-01-01T05:34:23.097839Z elapsed# 17.233293s EventsProcessed# 11981261 clients.size# 2 Clock# 1970-01-01T05:34:35.021285Z elapsed# 17.236543s EventsProcessed# 11984142 clients.size# 2 Clock# 1970-01-01T05:34:54.666431Z elapsed# 17.241623s EventsProcessed# 11988794 clients.size# 2 Clock# 1970-01-01T05:35:05.173607Z elapsed# 17.244029s EventsProcessed# 11991280 clients.size# 2 Clock# 1970-01-01T05:35:23.935375Z elapsed# 17.249120s EventsProcessed# 11995567 clients.size# 2 Clock# 1970-01-01T05:35:42.496068Z elapsed# 17.253161s EventsProcessed# 11999784 clients.size# 2 
Clock# 1970-01-01T05:35:58.282754Z elapsed# 17.256763s EventsProcessed# 12003567 clients.size# 2 Clock# 1970-01-01T05:36:09.459597Z elapsed# 17.259311s EventsProcessed# 12006240 clients.size# 2 Clock# 1970-01-01T05:36:28.004776Z elapsed# 17.261404s EventsProcessed# 12008417 clients.size# 1 Clock# 1970-01-01T05:36:42.830085Z elapsed# 17.263411s EventsProcessed# 12010284 clients.size# 1 Clock# 1970-01-01T05:36:59.633865Z elapsed# 17.265261s EventsProcessed# 12012310 clients.size# 1 Clock# 1970-01-01T05:37:14.090327Z elapsed# 17.266955s EventsProcessed# 12014081 clients.size# 1 Clock# 1970-01-01T05:37:28.505214Z elapsed# 17.268600s EventsProcessed# 12015798 clients.size# 1 Clock# 1970-01-01T05:37:48.133359Z elapsed# 17.268655s EventsProcessed# 12015800 clients.size# 0 Clock# 1970-01-01T05:38:01.586770Z elapsed# 17.268660s EventsProcessed# 12015800 clients.size# 0 Clock# 1970-01-01T05:38:15.734663Z elapsed# 17.268664s EventsProcessed# 12015800 clients.size# 0 Clock# 1970-01-01T05:38:30.615762Z elapsed# 17.270236s EventsProcessed# 12017401 clients.size# 1 Clock# 1970-01-01T05:38:49.792735Z elapsed# 17.272462s EventsProcessed# 12019696 clients.size# 1 Clock# 1970-01-01T05:39:07.305456Z elapsed# 17.274305s EventsProcessed# 12021715 clients.size# 1 Clock# 1970-01-01T05:39:26.703764Z elapsed# 17.277258s EventsProcessed# 12024080 clients.size# 1 Clock# 1970-01-01T05:39:36.841370Z elapsed# 17.279026s EventsProcessed# 12025267 clients.size# 1 Clock# 1970-01-01T05:39:56.353783Z elapsed# 17.282852s EventsProcessed# 12027589 clients.size# 1 Clock# 1970-01-01T05:40:16.065998Z elapsed# 17.286314s EventsProcessed# 12029920 clients.size# 1 Clock# 1970-01-01T05:40:26.339727Z elapsed# 17.287703s EventsProcessed# 12031134 clients.size# 1 Clock# 1970-01-01T05:40:41.290845Z elapsed# 17.289602s EventsProcessed# 12032927 clients.size# 1 Clock# 1970-01-01T05:41:01.285922Z elapsed# 17.292753s EventsProcessed# 12035264 clients.size# 1 Clock# 1970-01-01T05:41:18.434769Z elapsed# 17.295508s EventsProcessed# 12037319 clients.size# 1 Clock# 1970-01-01T05:41:38.157760Z elapsed# 17.298514s EventsProcessed# 12039712 clients.size# 1 Clock# 1970-01-01T05:41:52.683545Z elapsed# 17.301074s EventsProcessed# 12041413 clients.size# 1 Clock# 1970-01-01T05:42:11.065430Z elapsed# 17.303308s EventsProcessed# 12043570 clients.size# 1 Clock# 1970-01-01T05:42:23.847854Z elapsed# 17.304707s EventsProcessed# 12044995 clients.size# 1 Clock# 1970-01-01T05:42:36.554323Z elapsed# 17.307781s EventsProcessed# 12047993 clients.size# 2 Clock# 1970-01-01T05:42:55.265396Z elapsed# 17.312550s EventsProcessed# 12052427 clients.size# 2 Clock# 1970-01-01T05:43:10.545235Z elapsed# 17.316530s EventsProcessed# 12056062 clients.size# 2 Clock# 1970-01-01T05:43:23.272320Z elapsed# 17.319802s EventsProcessed# 12059043 clients.size# 2 Clock# 1970-01-01T05:43:42.971689Z elapsed# 17.324497s EventsProcessed# 12063746 clients.size# 2 Clock# 1970-01-01T05:43:55.563679Z elapsed# 17.327558s EventsProcessed# 12066836 clients.size# 2 Clock# 1970-01-01T05:44:12.636766Z elapsed# 17.331952s EventsProcessed# 12070846 clients.size# 2 Clock# 1970-01-01T05:44:24.544764Z elapsed# 17.335026s EventsProcessed# 12073798 clients.size# 2 Clock# 1970-01-01T05:44:35.161407Z elapsed# 17.337579s EventsProcessed# 12076277 clients.size# 2 Clock# 1970-01-01T05:44:47.796621Z elapsed# 17.341298s EventsProcessed# 12079214 clients.size# 2 Clock# 1970-01-01T05:44:59.459991Z elapsed# 17.347706s EventsProcessed# 12083483 clients.size# 3 Clock# 1970-01-01T05:45:14.390183Z elapsed# 17.355571s 
EventsProcessed# 12088872 clients.size# 3 Clock# 1970-01-01T05:45:28.223900Z elapsed# 17.362224s EventsProcessed# 12093806 clients.size# 3 Clock# 1970-01-01T05:45:41.604842Z elapsed# 17.368992s EventsProcessed# 12098556 clients.size# 3 Clock# 1970-01-01T05:45:55.039990Z elapsed# 17.374781s EventsProcessed# 12103366 clients.size# 3 Clock# 1970-01-01T05:46:08.021312Z elapsed# 17.380317s EventsProcessed# 12108081 clients.size# 3 Clock# 1970-01-01T05:46:27.475446Z elapsed# 17.385333s EventsProcessed# 12112637 clients.size# 2 Clock# 1970-01-01T05:46:41.778802Z elapsed# 17.390847s EventsProcessed# 12117643 clients.size# 3 Clock# 1970-01-01T05:47:00.076613Z elapsed# 17.398184s EventsProcessed# 12124279 clients.size# 3 Clock# 1970-01-01T05:47:15.729605Z elapsed# 17.403649s EventsProcessed# 12129834 clients.size# 3 Clock# 1970-01-01T05:47:31.965031Z elapsed# 17.409614s EventsProcessed# 12135794 clients.size# 3 Clock# 1970-01-01T05:47:42.979690Z elapsed# 17.413597s EventsProcessed# 12139705 clients.size# 3 Clock# 1970-01-01T05:47:58.649850Z elapsed# 17.419114s EventsProcessed# 12145345 clients.size# 3 Clock# 1970-01-01T05:48:15.362638Z elapsed# 17.429640s EventsProcessed# 12153294 clients.size# 4 Clock# 1970-01-01T05:48:26.614225Z elapsed# 17.437339s EventsProcessed# 12159919 clients.size# 5 Clock# 1970-01-01T05:48:42.423637Z elapsed# 17.449652s EventsProcessed# 12169617 clients.size# 5 Clock# 1970-01-01T05:49:01.645807Z elapsed# 17.466714s EventsProcessed# 12181146 clients.size# 5 Clock# 1970-01-01T05:49:14.360173Z elapsed# 17.473019s EventsProcessed# 12187106 clients.size# 4 Clock# 1970-01-01T05:49:31.465556Z elapsed# 17.482016s EventsProcessed# 12195207 clients.size# 4 Clock# 1970-01-01T05:49:47.194228Z elapsed# 17.490681s EventsProcessed# 12202697 clients.size# 4 Clock# 1970-01-01T05:49:57.864806Z elapsed# 17.498022s EventsProcessed# 12207767 clients.size# 4 Clock# 1970-01-01T05:50:12.624517Z elapsed# 17.505092s EventsProcessed# 12214938 clients.size# 4 Clock# 1970-01-01T05:50:30.143054Z elapsed# 17.516544s EventsProcessed# 12223096 clients.size# 4 Clock# 1970-01-01T05:50:48.860865Z elapsed# 17.530938s EventsProcessed# 12231943 clients.size# 4 Clock# 1970-01-01T05:51:06.915067Z elapsed# 17.545317s EventsProcessed# 12240465 clients.size# 4 Clock# 1970-01-01T05:51:21.970968Z elapsed# 17.556389s EventsProcessed# 12247576 clients.size# 4 Clock# 1970-01-01T05:51:39.219905Z elapsed# 17.563344s EventsProcessed# 12253628 clients.size# 3 Clock# 1970-01-01T05:51:52.356571Z elapsed# 17.570664s EventsProcessed# 12259925 clients.size# 4 Clock# 1970-01-01T05:52:10.964934Z elapsed# 17.579777s EventsProcessed# 12268646 clients.size# 4 Clock# 1970-01-01T05:52:29.186607Z elapsed# 17.592449s EventsProcessed# 12277400 clients.size# 4 Clock# 1970-01-01T05:52:39.955301Z elapsed# 17.600708s EventsProcessed# 12282470 clients.size# 4 Clock# 1970-01-01T05:52:56.824055Z elapsed# 17.612451s EventsProcessed# 12290512 clients.size# 4 Clock# 1970-01-01T05:53:14.199681Z elapsed# 17.625525s EventsProcessed# 12298869 clients.size# 4 Clock# 1970-01-01T05:53:30.317070Z elapsed# 17.636977s EventsProcessed# 12306685 clients.size# 4 Clock# 1970-01-01T05:53:42.900100Z elapsed# 17.646350s EventsProcessed# 12312610 clients.size# 4 Clock# 1970-01-01T05:53:58.255036Z elapsed# 17.658588s EventsProcessed# 12319936 clients.size# 4 Clock# 1970-01-01T05:54:09.410610Z elapsed# 17.665633s EventsProcessed# 12325169 clients.size# 4 Clock# 1970-01-01T05:54:27.027535Z elapsed# 17.675296s EventsProcessed# 12333656 clients.size# 4 Clock# 
1970-01-01T05:54:42.197889Z elapsed# 17.685556s EventsProcessed# 12341046 clients.size# 4 Clock# 1970-01-01T05:54:59.964838Z elapsed# 17.697459s EventsProcessed# 12349610 clients.size# 4 Clock# 1970-01-01T05:55:18.783717Z elapsed# 17.709258s EventsProcessed# 12358592 clients.size# 4 Clock# 1970-01-01T05:55:36.703286Z elapsed# 17.719872s EventsProcessed# 12367160 clients.size# 4 Clock# 1970-01-01T05:55:52.416336Z elapsed# 17.729994s EventsProcessed# 12374665 clients.size# 4 Clock# 1970-01-01T05:56:12.184904Z elapsed# 17.740255s EventsProcessed# 12383978 clients.size# 4 Clock# 1970-01-01T05:56:30.912662Z elapsed# 17.750710s EventsProcessed# 12392876 clients.size# 4 Clock# 1970-01-01T05:56:43.074327Z elapsed# 17.757988s EventsProcessed# 12398563 clients.size# 4 Clock# 1970-01-01T05:56:56.788255Z elapsed# 17.767002s EventsProcessed# 12405168 clients.size# 4 Clock# 1970-01-01T05:57:11.279859Z elapsed# 17.775193s EventsProcessed# 12412211 clients.size# 4 Clock# 1970-01-01T05:57:28.086513Z elapsed# 17.785262s EventsProcessed# 12420142 clients.size# 4 Clock# 1970-01-01T05:57:41.746148Z elapsed# 17.791346s EventsProcessed# 12425073 clients.size# 3 Clock# 1970-01-01T05:57:55.812394Z elapsed# 17.796941s EventsProcessed# 12430095 clients.size# 3 Clock# 1970-01-01T05:58:12.789414Z elapsed# 17.804066s EventsProcessed# 12436185 clients.size# 3 Clock# 1970-01-01T05:58:29.272692Z elapsed# 17.810505s EventsProcessed# 12442023 clients.size# 3 Clock# 1970-01-01T05:58:48.051363Z elapsed# 17.815645s EventsProcessed# 12446495 clients.size# 2 Clock# 1970-01-01T05:59:07.769253Z elapsed# 17.821079s EventsProcessed# 12451044 clients.size# 2 Clock# 1970-01-01T05:59:19.275181Z elapsed# 17.824660s EventsProcessed# 12453817 clients.size# 2 Clock# 1970-01-01T05:59:34.643156Z elapsed# 17.829933s EventsProcessed# 12457453 clients.size# 2 Clock# 1970-01-01T05:59:54.208431Z elapsed# 17.838963s EventsProcessed# 12464533 clients.size# 3 >> DQCBO::Empty [GOOD] >> HypergraphBuild::SimpleChain3NodesTransitiveClosure [GOOD] >> HypergraphBuild::SimpleDimpleJoin [GOOD] >> DQCBO::RelCollectorBrokenEquiJoin [GOOD] >> HypergraphBuild::SimpleChain5NodesTransitiveClosure [GOOD] >> HypergraphBuild::IsReorderableConstraint >> DQCBO::DqOptimizeEquiJoinWithCostsNative [GOOD] >> DQCBO::DqOptimizeEquiJoinWithCostsPG [GOOD] >> HypergraphBuild::SimpleChain4NodesTransitiveClosure [GOOD] >> HypergraphBuild::AnyJoinConstraints1 [GOOD] >> HypergraphBuild::AnyJoinConstraints2 [GOOD] >> HypergraphBuild::AnyJoinConstraints3 [GOOD] >> HypergraphBuild::IsReorderableConstraint [GOOD] >> HypergraphBuild::JoinKindConflictSimple [GOOD] >> HypergraphBuild::SimpleCycle [GOOD] >> HypergraphBuild::TransitiveClosurePlusCycle [GOOD] >> HypergraphBuild::CondsThatMayCauseATransitiveClosureButTheyMustNot [GOOD] >> HypergraphBuild::TransitiveClosureManyCondsBetweenJoin [GOOD] >> HypergraphBuild::ManyCondsBetweenJoinForTransitiveClosure [GOOD] >> HypergraphBuild::JoinTopologiesBenchmark [GOOD] >> InterestingOrderingsShuffle::TwoOneItemEquivOnly [GOOD] >> InterestingOrderingsShuffle::ManyOneItemEquivOnly [GOOD] >> InterestingOrderingsShuffle::ConsideringOldFDs [GOOD] >> InterestingOrderingsShuffle::Join64ChainImitation >> result_convert::import_test [GOOD] >> InterestingOrderingsShuffle::Join64ChainImitation [GOOD] >> InterestingOrderingsShuffle::ManyItems [GOOD] >> InterestingOrderingsShuffle::PruningFDs [GOOD] >> corobenchmark::FuncCalls [GOOD] >> corobenchmark::TaskCalls [GOOD] >> corobenchmark::CoroAwaits [GOOD] >> HugeCluster::AllToAll [GOOD] |74.1%| [LD] 
{BAZEL_UPLOAD} $(B)/ydb/library/workload/tpch/ut/ydb-library-workload-tpch-ut >> HugeCluster::AllToOne >> test.py::test[solomon-LabelColumns-default.txt] [GOOD] >> test.py::test[solomon-Subquery-default.txt] |74.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/config/init/ut/ydb-core-config-init-ut |74.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/result_convert/import_test >> result_convert::import_test [GOOD] |73.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/actors/cppcoro/corobenchmark/corobenchmark >> corobenchmark::CoroAwaits [GOOD] |73.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/formats/arrow/accessor/sub_columns/ut/ydb-core-formats-arrow-accessor-sub_columns-ut >> test.py::test[solomon-Subquery-default.txt] [GOOD] >> test.py::test[solomon-UnknownSetting-] >> result_compare::import_test [GOOD] >> Init::TWithDefaultParser [GOOD] >> SubColumnsArrayAccessor::EmptyOthers [GOOD] >> SubColumnsArrayAccessor::SlicesDef [GOOD] >> SubColumnsArrayAccessor::FiltersDef [GOOD] |73.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/result_compare/import_test >> result_compare::import_test [GOOD] >> TBtreeIndexTPartLarge::SmallKeys1GB [GOOD] >> TBtreeIndexTPartLarge::MiddleKeys1GB |73.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/tools/simple_json_diff/simple_json_diff >> test.py::test[solomon-UnknownSetting-] [GOOD] |73.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/init/ut/unittest >> Init::TWithDefaultParser [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/formats/arrow/accessor/sub_columns/ut/unittest >> SubColumnsArrayAccessor::FiltersDef [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint32;records=0;size=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint32;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint32;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=string;records=0;size=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint32;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint32;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint8;records=0;size=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint8;records=0;count=0; {"internal":{"columns_data":{"stats":{"accessor":[],"size":[],"key_names":[],"records":[]},"records":{"records_count":6,"schema":""}},"settings":{"memory_limit":0,"data_extractor":{"class_name":"JSON_SCANNER","details":{}},"sparsed_detector_kff":4,"columns_limit":0,"others_allowed_fraction":0},"others_data":{"stats":{"accessor":[1,5,1,5,1],"size":[3,1,3,1,9],"key_names":["a","a1","b","b1","c"],"records":[3,1,3,1,3]},"records":{"data":[{"internal":{"data":"[\n [\n 
0,\n 0,\n 0,\n 2,\n 2,\n 2,\n 3,\n 3,\n 3,\n 5,\n 5\n ]\n]"},"records_count":11,"type":"Array"},{"internal":{"data":"[\n [\n 0,\n 2,\n 4,\n 1,\n 2,\n 4,\n 0,\n 2,\n 4,\n 0,\n 3\n ]\n]"},"records_count":11,"type":"Array"},{"internal":{"data":"[\n [\n \"1\",\n \"1\",\n \"111\",\n \"2\",\n \"2\",\n \"222\",\n \"3\",\n \"3\",\n \"333\",\n \"5\",\n \"5\"\n ]\n]"},"records_count":11,"type":"Array"}],"records_count":11,"schema":"record_idx: uint32;key: uint32;value: string;"}}},"records_count":6,"type":"SubColumnsArray"} {"internal":{"columns_data":{"stats":{"accessor":[1],"size":[9],"key_names":["c"],"records":[3]},"records":{"records_count":6,"schema":"c: string;"}},"settings":{"memory_limit":0,"data_extractor":{"class_name":"JSON_SCANNER","details":{}},"sparsed_detector_kff":4,"columns_limit":1,"others_allowed_fraction":0},"others_data":{"stats":{"accessor":[1,5,1,5],"size":[3,1,3,1],"key_names":["a","a1","b","b1"],"records":[3,1,3,1]},"records":{"data":[{"internal":{"data":"[\n [\n 0,\n 0,\n 2,\n 2,\n 3,\n 3,\n 5,\n 5\n ]\n]"},"records_count":8,"type":"Array"},{"internal":{"data":"[\n [\n 0,\n 2,\n 1,\n 2,\n 0,\n 2,\n 0,\n 3\n ]\n]"},"records_count":8,"type":"Array"},{"internal":{"data":"[\n [\n \"1\",\n \"1\",\n \"2\",\n \"2\",\n \"3\",\n \"3\",\n \"5\",\n \"5\"\n ]\n]"},"records_count":8,"type":"Array"}],"records_count":8,"schema":"record_idx: uint32;key: uint32;value: string;"}}},"records_count":6,"type":"SubColumnsArray"} {"internal":{"columns_data":{"stats":{"accessor":[1,1],"size":[3,9],"key_names":["a","c"],"records":[3,3]},"records":{"records_count":6,"schema":"a: string;c: string;"}},"settings":{"memory_limit":0,"data_extractor":{"class_name":"JSON_SCANNER","details":{}},"sparsed_detector_kff":4,"columns_limit":2,"others_allowed_fraction":0},"others_data":{"stats":{"accessor":[5,1,5],"size":[1,3,1],"key_names":["a1","b","b1"],"records":[1,3,1]},"records":{"data":[{"internal":{"data":"[\n [\n 0,\n 2,\n 2,\n 3,\n 5\n ]\n]"},"records_count":5,"type":"Array"},{"internal":{"data":"[\n [\n 1,\n 0,\n 1,\n 1,\n 2\n ]\n]"},"records_count":5,"type":"Array"},{"internal":{"data":"[\n [\n \"1\",\n \"2\",\n \"2\",\n \"3\",\n \"5\"\n ]\n]"},"records_count":5,"type":"Array"}],"records_count":5,"schema":"record_idx: uint32;key: uint32;value: string;"}}},"records_count":6,"type":"SubColumnsArray"} {"internal":{"columns_data":{"stats":{"accessor":[1,1,1],"size":[3,3,9],"key_names":["a","b","c"],"records":[3,3,3]},"records":{"records_count":6,"schema":"a: string;b: string;c: string;"}},"settings":{"memory_limit":0,"data_extractor":{"class_name":"JSON_SCANNER","details":{}},"sparsed_detector_kff":4,"columns_limit":3,"others_allowed_fraction":0},"others_data":{"stats":{"accessor":[5,5],"size":[1,1],"key_names":["a1","b1"],"records":[1,1]},"records":{"data":[{"internal":{"data":"[\n [\n 2,\n 5\n ]\n]"},"records_count":2,"type":"Array"},{"internal":{"data":"[\n [\n 0,\n 1\n ]\n]"},"records_count":2,"type":"Array"},{"internal":{"data":"[\n [\n \"2\",\n \"5\"\n ]\n]"},"records_count":2,"type":"Array"}],"records_count":2,"schema":"record_idx: uint32;key: uint32;value: string;"}}},"records_count":6,"type":"SubColumnsArray"} {"internal":{"columns_data":{"stats":{"accessor":[1,5,1,1],"size":[3,1,3,9],"key_names":["a","a1","b","c"],"records":[3,1,3,3]},"records":{"records_count":6,"schema":"a: string;a1: string;b: string;c: 
string;"}},"settings":{"memory_limit":0,"data_extractor":{"class_name":"JSON_SCANNER","details":{}},"sparsed_detector_kff":4,"columns_limit":4,"others_allowed_fraction":0},"others_data":{"stats":{"accessor":[5],"size":[1],"key_names":["b1"],"records":[1]},"records":{"data":[{"internal":{"data":"[\n [\n 5\n ]\n]"},"records_count":1,"type":"Array"},{"internal":{"data":"[\n [\n 0\n ]\n]"},"records_count":1,"type":"Array"},{"internal":{"data":"[\n [\n \"5\"\n ]\n]"},"records_count":1,"type":"Array"}],"records_count":1,"schema":"record_idx: uint32;key: uint32;value: string;"}}},"records_count":6,"type":"SubColumnsArray"} FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=string;records=2;size=9; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=2;count=2; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=string;records=3;size=13; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=3;count=3; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint32;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=3; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint32;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=3; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=2;count=3; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=3;count=3; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=2;count=3; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=2;count=3; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint32;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=3; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint32;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=3; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=2;count=3; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint32;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=3; 
{"internal":{"columns_data":{"stats":{"accessor":[],"size":[],"key_names":[],"records":[]},"records":{"records_count":6,"schema":""}},"settings":{"memory_limit":0,"data_extractor":{"class_name":"JSON_SCANNER","details":{}},"sparsed_detector_kff":4,"columns_limit":0,"others_allowed_fraction":0},"others_data":{"stats":{"accessor":[1,5,1,5,1],"size":[3,1,3,1,9],"key_names":["a","a1","b","b1","c"],"records":[3,1,3,1,3]},"records":{"data":[{"internal":{"data":"[\n [\n 0,\n 0,\n 0,\n 2,\n 2,\n 2,\n 3,\n 3,\n 3,\n 5,\n 5\n ]\n]"},"records_count":11,"type":"Array"},{"internal":{"data":"[\n [\n 0,\n 2,\n 4,\n 1,\n 2,\n 4,\n 0,\n 2,\n 4,\n 0,\n 3\n ]\n]"},"records_count":11,"type":"Array"},{"internal":{"data":"[\n [\n \"1\",\n \"1\",\n \"111\",\n \"2\",\n \"2\",\n \"222\",\n \"3\",\n \"3\",\n \"333\",\n \"5\",\n \"5\"\n ]\n]"},"records_count":11,"type":"Array"}],"records_count":11,"schema":"record_idx: uint32;key: uint32;value: string;"}}},"records_count":6,"type":"SubColumnsArray"} {"internal":{"columns_data":{"stats":{"accessor":[1],"size":[9],"key_names":["c"],"records":[3]},"records":{"records_count":6,"schema":"c: string;"}},"settings":{"memory_limit":0,"data_extractor":{"class_name":"JSON_SCANNER","details":{}},"sparsed_detector_kff":4,"columns_limit":1,"others_allowed_fraction":0},"others_data":{"stats":{"accessor":[1,5,1,5],"size":[3,1,3,1],"key_names":["a","a1","b","b1"],"records":[3,1,3,1]},"records":{"data":[{"internal":{"data":"[\n [\n 0,\n 0,\n 2,\n 2,\n 3,\n 3,\n 5,\n 5\n ]\n]"},"records_count":8,"type":"Array"},{"internal":{"data":"[\n [\n 0,\n 2,\n 1,\n 2,\n 0,\n 2,\n 0,\n 3\n ]\n]"},"records_count":8,"type":"Array"},{"internal":{"data":"[\n [\n \"1\",\n \"1\",\n \"2\",\n \"2\",\n \"3\",\n \"3\",\n \"5\",\n \"5\"\n ]\n]"},"records_count":8,"type":"Array"}],"records_count":8,"schema":"record_idx: uint32;key: uint32;value: string;"}}},"records_count":6,"type":"SubColumnsArray"} {"internal":{"columns_data":{"stats":{"accessor":[1,1],"size":[3,9],"key_names":["a","c"],"records":[3,3]},"records":{"records_count":6,"schema":"a: string;c: string;"}},"settings":{"memory_limit":0,"data_extractor":{"class_name":"JSON_SCANNER","details":{}},"sparsed_detector_kff":4,"columns_limit":2,"others_allowed_fraction":0},"others_data":{"stats":{"accessor":[5,1,5],"size":[1,3,1],"key_names":["a1","b","b1"],"records":[1,3,1]},"records":{"data":[{"internal":{"data":"[\n [\n 0,\n 2,\n 2,\n 3,\n 5\n ]\n]"},"records_count":5,"type":"Array"},{"internal":{"data":"[\n [\n 1,\n 0,\n 1,\n 1,\n 2\n ]\n]"},"records_count":5,"type":"Array"},{"internal":{"data":"[\n [\n \"1\",\n \"2\",\n \"2\",\n \"3\",\n \"5\"\n ]\n]"},"records_count":5,"type":"Array"}],"records_count":5,"schema":"record_idx: uint32;key: uint32;value: string;"}}},"records_count":6,"type":"SubColumnsArray"} {"internal":{"columns_data":{"stats":{"accessor":[1,1,1],"size":[3,3,9],"key_names":["a","b","c"],"records":[3,3,3]},"records":{"records_count":6,"schema":"a: string;b: string;c: string;"}},"settings":{"memory_limit":0,"data_extractor":{"class_name":"JSON_SCANNER","details":{}},"sparsed_detector_kff":4,"columns_limit":3,"others_allowed_fraction":0},"others_data":{"stats":{"accessor":[5,5],"size":[1,1],"key_names":["a1","b1"],"records":[1,1]},"records":{"data":[{"internal":{"data":"[\n [\n 2,\n 5\n ]\n]"},"records_count":2,"type":"Array"},{"internal":{"data":"[\n [\n 0,\n 1\n ]\n]"},"records_count":2,"type":"Array"},{"internal":{"data":"[\n [\n \"2\",\n \"5\"\n ]\n]"},"records_count":2,"type":"Array"}],"records_count":2,"schema":"record_idx: 
uint32;key: uint32;value: string;"}}},"records_count":6,"type":"SubColumnsArray"} {"internal":{"columns_data":{"stats":{"accessor":[1,5,1,1],"size":[3,1,3,9],"key_names":["a","a1","b","c"],"records":[3,1,3,3]},"records":{"records_count":6,"schema":"a: string;a1: string;b: string;c: string;"}},"settings":{"memory_limit":0,"data_extractor":{"class_name":"JSON_SCANNER","details":{}},"sparsed_detector_kff":4,"columns_limit":4,"others_allowed_fraction":0},"others_data":{"stats":{"accessor":[5],"size":[1],"key_names":["b1"],"records":[1]},"records":{"data":[{"internal":{"data":"[\n [\n 5\n ]\n]"},"records_count":1,"type":"Array"},{"internal":{"data":"[\n [\n 0\n ]\n]"},"records_count":1,"type":"Array"},{"internal":{"data":"[\n [\n \"5\"\n ]\n]"},"records_count":1,"type":"Array"}],"records_count":1,"schema":"record_idx: uint32;key: uint32;value: string;"}}},"records_count":6,"type":"SubColumnsArray"} FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=2;count=3; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=3;count=3; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=1;count=3; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=1;count=3; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=1;count=3; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=1;count=3; >> ydb-tests-functional-clickbench::import_test [GOOD] |73.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |73.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/clickbench/import_test >> ydb-tests-functional-clickbench::import_test [GOOD] |72.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/dq/actors/ut/ydb-library-yql-providers-dq-actors-ut |72.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/datetime/libdatetime_udf.so |72.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/formats/arrow/accessor/sparsed/ut/ydb-core-formats-arrow-accessor-sparsed-ut |72.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/solomon/pytest >> test.py::test[solomon-UnknownSetting-] [GOOD] |72.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.so >> TBsLocalRecovery::ChaoticWriteRestartHugeXXX [GOOD] >> TBsLocalRecovery::ChaoticWriteRestartHugeIncreased |72.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.so >> SharedThreads::RegistrationAndPassingAwayActorsCommon [GOOD] >> SharedThreads::RegistrationAndPassingAwayActorsLazy ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/dq/opt/ut/unittest >> InterestingOrderingsShuffle::PruningFDs [GOOD] Test command err: Input: Rels: [{rows: 1,cost: 0,vars: [(1,1)]}, {rows: 1,cost: 0,vars: [(2,1)]}] EqClasses: [[(1,1),(2,1)]] Input: 1: {RELOPTINFO :reloptkind 0 :relids (b 1) :rows 1 :consider_startup false :consider_param_startup false :consider_parallel false :reltarget {PATHTARGET :exprs ( {VAR :varno 1 :varattno 1 :vartype 25 :vartypmod -1 :varcollid 0 :varnullingrels (b) :varlevelsup 0 :varnosyn 1 :varattnosyn 1 :location -1 } ) :sortgrouprefs <> :cost.startup 0 
:cost.per_tuple 0 :width 8 :has_volatile_expr 0 } :pathlist ( {PATH :pathtype 318 :parent_relids (b 1) :required_outer (b) :parallel_aware false :parallel_safe false :parallel_workers 0 :rows 1 :startup_cost 0 :total_cost 0 :pathkeys <> } ) :ppilist <> :partial_pathlist <> :cheapest_startup_path <> :cheapest_total_path {PATH :pathtype 318 :parent_relids (b 1) :required_outer (b) :parallel_aware false :parallel_safe false :parallel_workers 0 :rows 1 :startup_cost 0 :total_cost 0 :pathkeys <> } :cheapest_unique_path <> :cheapest_parameterized_paths <> :direct_lateral_relids (b) :lateral_relids (b) :relid 1 :reltablespace 0 :rtekind 0 :min_attr 0 :max_attr 1 :nulling_relids (b) :lateral_vars <> :lateral_referencers (b) :indexlist <> :statlist <> :pages 1 :tuples 1 :allvisfrac 1 :eclass_indexes (b) :subroot <> :subplan_params <> :rel_parallel_workers -1 :amflags 1 :serverid 0 :userid 0 :useridiscurrent false :unique_for_rels <> :non_unique_for_rels <> :baserestrictinfo <> :baserestrictcost.startup 0 :baserestrictcost.per_tuple 0 :baserestrict_min_security 0 :joininfo <> :has_eclass_joins false :consider_partitionwise_join false :top_parent_relids (b) :nparts 0 :partbounds_merged false :partition_qual <> :live_parts (b) :all_partrels (b) } Input: 2: {RELOPTINFO :reloptkind 0 :relids (b 2) :rows 1 :consider_startup false :consider_param_startup false :consider_parallel false :reltarget {PATHTARGET :exprs ( {VAR :varno 2 :varattno 1 :vartype 25 :vartypmod -1 :varcollid 0 :varnullingrels (b) :varlevelsup 0 :varnosyn 2 :varattnosyn 1 :location -1 } ) :sortgrouprefs <> :cost.startup 0 :cost.per_tuple 0 :width 8 :has_volatile_expr 0 } :pathlist ( {PATH :pathtype 318 :parent_relids (b 2) :required_outer (b) :parallel_aware false :parallel_safe false :parallel_workers 0 :rows 1 :startup_cost 0 :total_cost 0 :pathkeys <> } ) :ppilist <> :partial_pathlist <> :cheapest_startup_path <> :cheapest_total_path {PATH :pathtype 318 :parent_relids (b 2) :required_outer (b) :parallel_aware false :parallel_safe false :parallel_workers 0 :rows 1 :startup_cost 0 :total_cost 0 :pathkeys <> } :cheapest_unique_path <> :cheapest_parameterized_paths <> :direct_lateral_relids (b) :lateral_relids (b) :relid 2 :reltablespace 0 :rtekind 0 :min_attr 0 :max_attr 1 :nulling_relids (b) :lateral_vars <> :lateral_referencers (b) :indexlist <> :statlist <> :pages 1 :tuples 1 :allvisfrac 1 :eclass_indexes (b) :subroot <> :subplan_params <> :rel_parallel_workers -1 :amflags 1 :serverid 0 :userid 0 :useridiscurrent false :unique_for_rels <> :non_unique_for_rels <> :baserestrictinfo <> :baserestrictcost.startup 0 :baserestrictcost.per_tuple 0 :baserestrict_min_security 0 :joininfo <> :has_eclass_joins false :consider_partitionwise_join false :top_parent_relids (b) :nparts 0 :partbounds_merged false :partition_qual <> :live_parts (b) :all_partrels (b) } Context: : {PLANNERINFO :parse <> :glob <> :query_level 1 :plan_params <> :outer_params (b) :simple_rel_array ( <> {RELOPTINFO :reloptkind 0 :relids (b 1) :rows 1 :consider_startup false :consider_param_startup false :consider_parallel false :reltarget {PATHTARGET :exprs ( {VAR :varno 1 :varattno 1 :vartype 25 :vartypmod -1 :varcollid 0 :varnullingrels (b) :varlevelsup 0 :varnosyn 1 :varattnosyn 1 :location -1 } ) :sortgrouprefs <> :cost.startup 0 :cost.per_tuple 0 :width 8 :has_volatile_expr 0 } :pathlist ( {PATH :pathtype 318 :parent_relids (b 1) :required_outer (b) :parallel_aware false :parallel_safe false :parallel_workers 0 :rows 1 :startup_cost 0 :total_cost 0 :pathkeys <> } ) 
:ppilist <> :partial_pathlist <> :cheapest_startup_path <> :cheapest_total_path {PATH :pathtype 318 :parent_relids (b 1) :required_outer (b) :parallel_aware false :parallel_safe false :parallel_workers 0 :rows 1 :startup_cost 0 :total_cost 0 :pathkeys <> } :cheapest_unique_path <> :cheapest_parameterized_paths <> :direct_lateral_relids (b) :lateral_relids (b) :relid 1 :reltablespace 0 :rtekind 0 :min_attr 0 :max_attr 1 :nulling_relids (b) :lateral_vars <> :lateral_referencers (b) :indexlist <> :statlist <> :pages 1 :tuples 1 :allvisfrac 1 :eclass_indexes (b 0) :subroot <> :subplan_params <> :rel_parallel_workers -1 :amflags 1 :serverid 0 :userid 0 :useridiscurrent false :unique_for_rels <> :non_unique_for_rels <> :baserestrictinfo <> :baserestrictcost.startup 0 :baserestrictcost.per_tuple 0 :baserestrict_min_security 0 :joininfo <> :has_eclass_joins false :consider_partitionwise_join false :top_parent_relids (b) :nparts 0 :partbounds_merged false :partition_qual <> :live_parts (b) :all_partrels (b) } {RELOPTINFO :reloptkind 0 :relids (b 2) :rows 1 :consider_startup false :consider_param_startup false :consider_parallel false :reltarget {PATHTARGET :exprs ( {VAR :varno 2 :varattno 1 :vartype 25 :vartypmod -1 :varcollid 0 :varnullingrels (b) :varlevelsup 0 :varnosyn 2 :varattnosyn 1 :location -1 } ) :sortgrouprefs <> :cost.startup 0 :cost.per_tuple 0 :width 8 :has_volatile_expr 0 } :pathlist ( {PATH :pathtype 318 :parent_relids (b 2) :required_outer (b) :parallel_aware false :parallel_safe false :parallel_workers 0 :rows 1 :startup_cost 0 :total_cost 0 :pathkeys <> } ) :ppilist <> :partial_pathlist <> :cheapest_startup_path <> :cheapest_total_path {PATH :pathtype 318 :parent_relids (b 2) :required_outer (b) :parallel_aware false :parallel_safe false :parallel_workers 0 :rows 1 :startup_cost 0 :total_cost 0 :pathkeys <> } :cheapest_unique_path <> :cheapest_parameterized_paths <> :direct_lateral_relids (b) :lateral_relids (b) :relid 2 :reltablespace 0 :rtekind 0 :min_attr 0 :max_attr 1 :nulling_relids (b) :lateral_vars <> :lateral_referencers (b) :indexlist <> :statlist <> :pages 1 :tuples 1 :allvisfrac 1 :eclass_indexes (b 0) :subroot <> :subplan_params <> :rel_parallel_workers -1 :amflags 1 :serverid 0 :userid 0 :useridiscurrent false :unique_for_rels <> :non_unique_for_rels <> :baserestrictinfo <> :baserestrictcost.startup 0 :baserestrictcost.per_tuple 0 :baserestrict_min_security 0 :joininfo <> :has_eclass_joins false :consider_partitionwise_join false :top_parent_relids (b) :nparts 0 :partbounds_merged false :partition_qual <> :live_parts (b) :all_partrels (b) } ) :simple_rel_array_size 3 :all_baserels (b 1 2) :outer_join_rels (b) :all_query_rels (b) :join_rel_list <> :join_cur_level 0 :init_plans <> :cte_plan_ids <> :multiexpr_params <> :join_domains <> :eq_classes ( {EQUIVALENCECLASS :ec_opfamilies (o 1976) :ec_collation 0 :ec_members ( {EQUIVALENCEMEMBER :em_expr {VAR :varno 1 :varattno 1 :vartype 25 :vartypmod -1 :varcollid 0 :varnullingrels (b) :varlevelsup 0 :varnosyn 1 :varattnosyn 1 :location -1 } :em_relids (b 1) :em_is_const false :em_is_child false :em_datatype 20 :em_jdomain <> } {EQUIVALENCEMEMBER :em_expr {VAR :varno 2 :varattno 1 :vartype 25 :vartypmod -1 :varcollid 0 :varnullingrels (b) :varlevelsup 0 :varnosyn 2 :varattnosyn 1 :location -1 } :em_relids (b 2) :em_is_const false :em_is_child false :em_datatype 20 :em_jdomain <> } ) :ec_sources <> :ec_derives <> :ec_relids (b 1 2) :ec_has_const false :ec_has_volatile false :ec_broken false :ec_sortref 0 :ec_min_security 0 
:ec_max_security 0 } ) :ec_merging_done true :canon_pathkeys <> :left_join_clauses <> :right_join_clauses <> :full_join_clauses <> :join_info_list <> :last_rinfo_serial 0 :all_result_relids (b) :leaf_result_relids (b) :append_rel_list <> :row_identity_vars <> :rowMarks <> :placeholder_list <> :fkey_list <> :query_pathkeys <> :group_pathkeys <> :num_groupby_pathkeys 0 :window_pathkeys <> :distinct_pathkeys <> :sort_pathkeys <> :processed_groupClause <> :processed_distinctClause <> :processed_tlist <> :update_colnos <> :minmax_aggs <> :total_table_pages 0 :tuple_fraction 0 :limit_tuples 0 :qual_security_level 0 :hasJoinRTEs false :hasLateralRTEs false :hasHavingQual false :hasPseudoConstantQuals false :hasAlternativeSubPlans false :placeholdersFrozen false :hasRecursion false :agginfos <> :aggtransinfos <> :numOrderedAggs 0 :hasNonPartialAggs false :hasNonSerialAgg ... ids (b 2) :orclause <> :rinfo_serial 1 :eval_cost.startup 0 :eval_cost.per_tuple 100000 :norm_selec 1 :outer_selec -1 :mergeopfamilies <> :left_em {EQUIVALENCEMEMBER :em_expr {VAR :varno 1 :varattno 1 :vartype 25 :vartypmod -1 :varcollid 0 :varnullingrels (b) :varlevelsup 0 :varnosyn 1 :varattnosyn 1 :location -1 } :em_relids (b 1) :em_is_const false :em_is_child false :em_datatype 20 :em_jdomain <> } :right_em {EQUIVALENCEMEMBER :em_expr {VAR :varno 2 :varattno 1 :vartype 25 :vartypmod -1 :varcollid 0 :varnullingrels (b) :varlevelsup 0 :varnosyn 2 :varattnosyn 1 :location -1 } :em_relids (b 2) :em_is_const false :em_is_child false :em_datatype 20 :em_jdomain <> } :outer_is_left true :hashjoinoperator 410 :left_bucketsize -1 :right_bucketsize -1 :left_mcvfreq -1 :right_mcvfreq -1 :left_hasheqoperator 98 :right_hasheqoperator 98 } ) } :cheapest_total_path {NESTPATH :jpath.path.pathtype 335 :parent_relids (b 1 2) :required_outer (b) :jpath.path.parallel_aware false :jpath.path.parallel_safe false :jpath.path.parallel_workers 0 :jpath.path.rows 1 :jpath.path.startup_cost 0 :jpath.path.total_cost 100000.015 :jpath.path.pathkeys <> :jpath.jointype 0 :jpath.inner_unique false :jpath.outerjoinpath {PATH :pathtype 318 :parent_relids (b 1) :required_outer (b) :parallel_aware false :parallel_safe false :parallel_workers 0 :rows 1 :startup_cost 0 :total_cost 0 :pathkeys <> } :jpath.innerjoinpath {MATERIALPATH :path.pathtype 339 :parent_relids (b 2) :required_outer (b) :path.parallel_aware false :path.parallel_safe false :path.parallel_workers 0 :path.rows 1 :path.startup_cost 0 :path.total_cost 0.005 :path.pathkeys <> :subpath {PATH :pathtype 318 :parent_relids (b 2) :required_outer (b) :parallel_aware false :parallel_safe false :parallel_workers 0 :rows 1 :startup_cost 0 :total_cost 0 :pathkeys <> } } :jpath.joinrestrictinfo ( {RESTRICTINFO :clause {OPEXPR :opno 410 :opfuncid 467 :opresulttype 16 :opretset false :opcollid 0 :inputcollid 0 :args ( {VAR :varno 1 :varattno 1 :vartype 25 :vartypmod -1 :varcollid 0 :varnullingrels (b) :varlevelsup 0 :varnosyn 1 :varattnosyn 1 :location -1 } {VAR :varno 2 :varattno 1 :vartype 25 :vartypmod -1 :varcollid 0 :varnullingrels (b) :varlevelsup 0 :varnosyn 2 :varattnosyn 1 :location -1 } ) :location -1 } :is_pushed_down true :can_join true :pseudoconstant false :has_clone false :is_clone false :leakproof false :has_volatile 2 :security_level 0 :num_base_rels 2 :clause_relids (b 1 2) :required_relids (b 1 2) :incompatible_relids (b) :outer_relids (b) :left_relids (b 1) :right_relids (b 2) :orclause <> :rinfo_serial 1 :eval_cost.startup 0 :eval_cost.per_tuple 100000 :norm_selec 1 :outer_selec -1 
:mergeopfamilies <> :left_em {EQUIVALENCEMEMBER :em_expr {VAR :varno 1 :varattno 1 :vartype 25 :vartypmod -1 :varcollid 0 :varnullingrels (b) :varlevelsup 0 :varnosyn 1 :varattnosyn 1 :location -1 } :em_relids (b 1) :em_is_const false :em_is_child false :em_datatype 20 :em_jdomain <> } :right_em {EQUIVALENCEMEMBER :em_expr {VAR :varno 2 :varattno 1 :vartype 25 :vartypmod -1 :varcollid 0 :varnullingrels (b) :varlevelsup 0 :varnosyn 2 :varattnosyn 1 :location -1 } :em_relids (b 2) :em_is_const false :em_is_child false :em_datatype 20 :em_jdomain <> } :outer_is_left true :hashjoinoperator 410 :left_bucketsize -1 :right_bucketsize -1 :left_mcvfreq -1 :right_mcvfreq -1 :left_hasheqoperator 98 :right_hasheqoperator 98 } ) } :cheapest_unique_path <> :cheapest_parameterized_paths ( {NESTPATH :jpath.path.pathtype 335 :parent_relids (b 1 2) :required_outer (b) :jpath.path.parallel_aware false :jpath.path.parallel_safe false :jpath.path.parallel_workers 0 :jpath.path.rows 1 :jpath.path.startup_cost 0 :jpath.path.total_cost 100000.015 :jpath.path.pathkeys <> :jpath.jointype 0 :jpath.inner_unique false :jpath.outerjoinpath {PATH :pathtype 318 :parent_relids (b 1) :required_outer (b) :parallel_aware false :parallel_safe false :parallel_workers 0 :rows 1 :startup_cost 0 :total_cost 0 :pathkeys <> } :jpath.innerjoinpath {MATERIALPATH :path.pathtype 339 :parent_relids (b 2) :required_outer (b) :path.parallel_aware false :path.parallel_safe false :path.parallel_workers 0 :path.rows 1 :path.startup_cost 0 :path.total_cost 0.005 :path.pathkeys <> :subpath {PATH :pathtype 318 :parent_relids (b 2) :required_outer (b) :parallel_aware false :parallel_safe false :parallel_workers 0 :rows 1 :startup_cost 0 :total_cost 0 :pathkeys <> } } :jpath.joinrestrictinfo ( {RESTRICTINFO :clause {OPEXPR :opno 410 :opfuncid 467 :opresulttype 16 :opretset false :opcollid 0 :inputcollid 0 :args ( {VAR :varno 1 :varattno 1 :vartype 25 :vartypmod -1 :varcollid 0 :varnullingrels (b) :varlevelsup 0 :varnosyn 1 :varattnosyn 1 :location -1 } {VAR :varno 2 :varattno 1 :vartype 25 :vartypmod -1 :varcollid 0 :varnullingrels (b) :varlevelsup 0 :varnosyn 2 :varattnosyn 1 :location -1 } ) :location -1 } :is_pushed_down true :can_join true :pseudoconstant false :has_clone false :is_clone false :leakproof false :has_volatile 2 :security_level 0 :num_base_rels 2 :clause_relids (b 1 2) :required_relids (b 1 2) :incompatible_relids (b) :outer_relids (b) :left_relids (b 1) :right_relids (b 2) :orclause <> :rinfo_serial 1 :eval_cost.startup 0 :eval_cost.per_tuple 100000 :norm_selec 1 :outer_selec -1 :mergeopfamilies <> :left_em {EQUIVALENCEMEMBER :em_expr {VAR :varno 1 :varattno 1 :vartype 25 :vartypmod -1 :varcollid 0 :varnullingrels (b) :varlevelsup 0 :varnosyn 1 :varattnosyn 1 :location -1 } :em_relids (b 1) :em_is_const false :em_is_child false :em_datatype 20 :em_jdomain <> } :right_em {EQUIVALENCEMEMBER :em_expr {VAR :varno 2 :varattno 1 :vartype 25 :vartypmod -1 :varcollid 0 :varnullingrels (b) :varlevelsup 0 :varnosyn 2 :varattnosyn 1 :location -1 } :em_relids (b 2) :em_is_const false :em_is_child false :em_datatype 20 :em_jdomain <> } :outer_is_left true :hashjoinoperator 410 :left_bucketsize -1 :right_bucketsize -1 :left_mcvfreq -1 :right_mcvfreq -1 :left_hasheqoperator 98 :right_hasheqoperator 98 } ) } ) :direct_lateral_relids (b) :lateral_relids (b) :relid 0 :reltablespace 0 :rtekind 2 :min_attr 0 :max_attr 0 :nulling_relids (b) :lateral_vars <> :lateral_referencers (b) :indexlist <> :statlist <> :pages 0 :tuples 0 :allvisfrac 0 
:eclass_indexes (b) :subroot <> :subplan_params <> :rel_parallel_workers -1 :amflags 0 :serverid 0 :userid 0 :useridiscurrent false :unique_for_rels <> :non_unique_for_rels <> :baserestrictinfo <> :baserestrictcost.startup 0 :baserestrictcost.per_tuple 0 :baserestrict_min_security 4294967295 :joininfo <> :has_eclass_joins false :consider_partitionwise_join false :top_parent_relids (b) :nparts -1 :partbounds_merged false :partition_qual <> :live_parts (b) :all_partrels (b) } Result: Rows: 1.00 TotalCost: 100000.01 { Inner Join Loop Strategy Rels: [1,2] Op: (1,1) = (2,1) { Node Rels: [1] } { Node Rels: [2] } } Time for Enumerate(MakeClique(6)): mean: 0.000960, values: [0.000738519,0.000886351,0.001256401] seconds Time for Enumerate(MakeStar(6)): mean: 0.000251, values: [0.000185585,0.000298364,0.000269486] seconds Time for Enumerate(MakeChain(6)): mean: 0.000218, values [0.00019083,0.000238749,0.000225099] seconds |72.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/io_formats/arrow/scheme/ut/ydb-core-io_formats-arrow-scheme-ut >> TestIssuesGrouping::ShouldCountEveryIssue [GOOD] >> TestIssuesGrouping::ShouldRemoveOldIssues [GOOD] >> TestIssuesGrouping::ShouldRemoveIfMoreThanMaxIssues [GOOD] >> TestIssuesGrouping::ShouldRemoveTheOldestIfMoreThanMaxIssues [GOOD] >> TestIssuesGrouping::ShouldSaveSubIssues [GOOD] >> ResultReceiver::ReceiveStatus [GOOD] >> ResultReceiver::ReceiveError [GOOD] >> ResultReceiver::WriteQueue [GOOD] >> SparsedArrayAccessor::FiltersDef [GOOD] >> SparsedArrayAccessor::SlicesNull [GOOD] >> SparsedArrayAccessor::SlicesDef [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump] >> common/__init__.py::py2_flake8 [GOOD] >> common/composite_assert.py::py2_flake8 [GOOD] >> common/generators.py::py2_flake8 [GOOD] >> common/local_db_scheme.py::py2_flake8 [GOOD] >> common/path_types.py::py2_flake8 [GOOD] >> common/protobuf_cms.py::py2_flake8 [GOOD] >> common/protobuf_kv.py::py2_flake8 [GOOD] >> common/types.py::py2_flake8 [GOOD] >> common/workload_manager.py::py2_flake8 [GOOD] >> harness/__init__.py::py2_flake8 [GOOD] >> harness/kikimr_cluster.py::py2_flake8 [GOOD] >> harness/kikimr_config.py::py2_flake8 [GOOD] >> harness/kikimr_port_allocator.py::py2_flake8 [GOOD] >> harness/param_constants.py::py2_flake8 [GOOD] >> harness/util.py::py2_flake8 [GOOD] >> kv/helpers.py::py2_flake8 [GOOD] |72.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/common/libcore-kqp-common.a >> matchers/collection.py::py2_flake8 [GOOD] >> matchers/response.py::py2_flake8 [GOOD] >> matchers/scheme_ops.py::py2_flake8 [GOOD] >> nemesis/__init__.py::py2_flake8 [GOOD] |72.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tx/sequenceshard/public/ut/ydb-core-tx-sequenceshard-public-ut >> nemesis/nemesis_network.py::py2_flake8 [GOOD] >> nemesis/nemesis_time_terrorist.py::py2_flake8 [GOOD] >> nemesis/network/client.py::py2_flake8 [GOOD] >> nemesis/safety_warden.py::py2_flake8 [GOOD] >> predicates/blobstorage.py::py2_flake8 [GOOD] >> predicates/hive.py::py2_flake8 [GOOD] >> FormatCSV::EmptyData [GOOD] >> FormatCSV::Common |72.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/dq/actors/ut/unittest >> ResultReceiver::WriteQueue [GOOD] >> FormatCSV::Instants [GOOD] >> common/cms.py::py2_flake8 [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/formats/arrow/accessor/sparsed/ut/unittest >> SparsedArrayAccessor::SlicesDef [GOOD] >> FormatCSV::Common [GOOD] Test command err: 
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint32;records=0;size=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint32;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=string;records=0;size=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=string;records=5;size=21; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=5;count=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=1;count=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=1;count=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint32;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=4;count=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=3;count=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=1;count=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=3;count=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=2;count=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=1;count=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=1;count=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=5;count=5; >> common/delayed.py::py2_flake8 [GOOD] >> common/helpers.py::py2_flake8 [GOOD] >> FormatCSV::Strings [GOOD] >> common/msgbus_types.py::py2_flake8 [GOOD] >> FormatCSV::Nulls [GOOD] >> common/protobuf.py::py2_flake8 [GOOD] >> common/protobuf_console.py::py2_flake8 [GOOD] >> common/protobuf_ss.py::py2_flake8 [GOOD] >> common/wait_for.py::py2_flake8 [GOOD] >> common/yatest_common.py::py2_flake8 [GOOD] >> harness/daemon.py::py2_flake8 [GOOD] >> harness/kikimr_cluster_interface.py::py2_flake8 [GOOD] >> harness/kikimr_node_interface.py::py2_flake8 [GOOD] >> harness/kikimr_runner.py::py2_flake8 [GOOD] >> harness/tls_tools.py::py2_flake8 [GOOD] >> kv/__init__.py::py2_flake8 [GOOD] >> matchers/__init__.py::py2_flake8 [GOOD] >> matchers/datashard_matchers.py::py2_flake8 [GOOD] |72.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/py2_flake8 >> predicates/hive.py::py2_flake8 [GOOD] >> matchers/response_matchers.py::py2_flake8 [GOOD] >> matchers/tablets.py::py2_flake8 [GOOD] >> nemesis/nemesis_core.py::py2_flake8 [GOOD] >> nemesis/nemesis_process_killers.py::py2_flake8 [GOOD] >> 
nemesis/network/__init__.py::py2_flake8 [GOOD] >> nemesis/remote_execution.py::py2_flake8 [GOOD] >> predicates/__init__.py::py2_flake8 [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump] [GOOD] >> TBsDbStat::ChaoticParallelWrite_DbStat [GOOD] >> predicates/executor.py::py2_flake8 [GOOD] >> predicates/tx.py::py2_flake8 [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump_ds_init] >> TBsHuge::Simple |72.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |72.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/py2_flake8 >> predicates/tx.py::py2_flake8 [GOOD] |72.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/mvp/oidc_proxy/bin/mvp_oidc_proxy ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/io_formats/arrow/scheme/ut/unittest >> FormatCSV::Nulls [GOOD] Test command err: 12000000 Cannot read CSV: no columns specified Cannot read CSV: Invalid: Empty CSV file d'Artagnan '"' Jeanne d'Arc "'" 'd'Artagnan' ''"'' 'Jeanne d'Arc' '"'"' d'Artagnan '"' Jeanne d'Arc "'" src: ,"","" ,"","" ,, parsed: ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,ᴺᵁᴸᴸ,ᴺᵁᴸᴸ src: ,"","" ,"","" ,, parsed: ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,ᴺᵁᴸᴸ,ᴺᵁᴸᴸ src: \N,"","" \N,"\N","\N" \N,\N,\N parsed: ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,\N,\N ᴺᵁᴸᴸ,ᴺᵁᴸᴸ,ᴺᵁᴸᴸ src: NULL,"","" NULL,"NULL","NULL" NULL,NULL,NULL parsed: ᴺᵁᴸᴸ,, ᴺᵁᴸᴸ,NULL,NULL ᴺᵁᴸᴸ,ᴺᵁᴸᴸ,ᴺᵁᴸᴸ |72.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 |72.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 >> TBsHuge::Simple [GOOD] >> TBsHuge::SimpleErasureNone >> test_transform.py::TestYamlConfigTransformations::test_basic[args0-dump_ds_init] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump] |72.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/ut/ydb-apps-ydb-ut |72.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part13/ydb-tests-fq-yt-kqp_yt_file-part13 |72.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/dq/actors/compute/ut/ydb-library-yql-dq-actors-compute-ut |72.0%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_board/ut_double_indexed/ydb-core-tx-scheme_board-ut_double_indexed >> TBsHuge::SimpleErasureNone [GOOD] >> TBsLocalRecovery::ChaoticWriteRestart >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] >> Mirror3of4::ReplicationSmall |71.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_session/ydb-tests-functional-kqp-kqp_query_session >> TComputeActorAsyncInputHelperTest::PollAsyncInput [GOOD] >> TComputeActorTest::Empty [GOOD] >> TComputeActorTest::ReceiveData [GOOD] >> TDqSourceWatermarkTrackerTest::StartWatermark1 [GOOD] >> TDqSourceWatermarkTrackerTest::StartWatermark2 [GOOD] >> TDqSourceWatermarkTrackerTest::StartWatermark3 [GOOD] >> TDqSourceWatermarkTrackerTest::WatermarkMovement1 [GOOD] >> TDqSourceWatermarkTrackerTest::WatermarkMovement2 [GOOD] >> TDqSourceWatermarkTrackerTest::WatermarkMovement3 [GOOD] >> TDqSourceWatermarkTrackerTest::WatermarkMovement4 [GOOD] >> TDqSourceWatermarkTrackerTest::IdleFirstShouldReturnStartWatermark [GOOD] >> TDqSourceWatermarkTrackerTest::Idle1 [GOOD] >> TDqSourceWatermarkTrackerTest::IdleNextCheckAt [GOOD] >> TIssuesBufferTest::TestEmpty [GOOD] >> TIssuesBufferTest::TestSimplePush [GOOD] >> TIssuesBufferTest::TestPushWithOverflow [GOOD] >> 
TIssuesBufferTest::TestSmallBuffer [GOOD] >> TIssuesBufferTest::TestUseAfterDump [GOOD] |71.5%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/actors/wilson/ut/ydb-library-actors-wilson-ut |71.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/common/libcore-kqp-common.a >> TBtreeIndexTPartLarge::MiddleKeys1GB [GOOD] >> TBtreeIndexTPartLarge::BigKeys1GB ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/dq/actors/compute/ut/unittest >> TIssuesBufferTest::TestUseAfterDump [GOOD] Test command err: 2025-07-08T11:55:05.794061Z :Unused ERROR: TxId: TxId, task: 0. Unexpected input channelId: 0 seqNo: 0, expected: 1 |71.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/tablet_flat/ut_util/ydb-core-tablet_flat-ut_util >> test_transform.py::TestYamlConfigTransformations::test_basic[args1-dump_ds_init] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump] |70.7%| [TA] $(B)/ydb/tests/library/test-results/py2_flake8/{meta.json ... results_accumulator.log} |70.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stability/tool/tool |70.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stability/tool/tool |70.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part3/ydb-tests-fq-yt-kqp_yt_file-part3 |70.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sdk/cpp/sdk_credprovider/ydb-tests-functional-sdk-cpp-sdk_credprovider >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump] [GOOD] >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump_ds_init] |70.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/hmac/ut/ydb-core-fq-libs-hmac-ut >> ydb-tests-functional-canonical::import_test [GOOD] >> TYardTest::TestEnormousDisk [GOOD] |70.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yql/providers/s3/common/ut/ydb-library-yql-providers-s3-common-ut |70.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/import_test >> ydb-tests-functional-canonical::import_test [GOOD] >> HmacSha::HmacSha1 [GOOD] |69.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/workload/benchmark_base/ut/ydb-library-workload-benchmark_base-ut |69.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part12/ydb-tests-fq-yt-kqp_yt_file-part12 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> TYardTest::TestEnormousDisk [GOOD] Test command err: 2025-07-08T11:54:29.296786Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:29.297200Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 7378563360473222827 MagicNextLogChunkReference: 472570324176290032 MagicLogChunk: 9284297497256767328 MagicDataChunk: 4400291947234635200 MagicSysLogChunk: 5982664749680058578 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975669259160 (2025-07-08T11:54:29.259160Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:29.298677Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:29.299403Z :BS_PDISK NOTICE: 
{LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:29.299584Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:29.299937Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:29.395462Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1181067 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-07-08T11:54:29.395793Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2025-07-08T11:54:29.453078Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:29.453271Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 5242880000 bytes (5 GB) Guid: 362482103110879707 MagicNextLogChunkReference: 12918210697338452221 MagicLogChunk: 1296273416771190945 MagicDataChunk: 4004015091740308289 MagicSysLogChunk: 9134538857906171053 MagicFormatChunk: 17332287817462050952 ChunkSize: 6291456 bytes (6 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975669422181 (2025-07-08T11:54:29.422181Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:29.455721Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:29.456944Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:29.456980Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:29.457220Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:29.550155Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New 
owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1546316 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-07-08T11:54:29.553584Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:419} outSectorIdx >= chunkSizeUsableSectors PDiskId# 1 OutSectorIdx# 1056832 OutLastSectorIdx# 1056832 ChunkSizeUsableSectors# 1536 Offset# 4294967295 Size# 128 2025-07-08T11:54:29.553602Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:922} SendChunkReadErrorPDiskId# 1 invalid size# 128 and offset# 4294967295 for ownerId# 3 can't read chunkIdx# 2 ReqId# 2560009518 PDiskId# 1 2025-07-08T11:54:30.063240Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:30.072050Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 5242880000 bytes (5 GB) Guid: 10022951889808514248 MagicNextLogChunkReference: 12902532288856091707 MagicLogChunk: 6820649720564777759 MagicDataChunk: 14366192791172317603 MagicSysLogChunk: 15958815614652216450 MagicFormatChunk: 17332287817462050952 ChunkSize: 6291456 bytes (6 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975669872918 (2025-07-08T11:54:29.872918Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:30.084427Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:30.095630Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:30.095667Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:30.095987Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:30.162492Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1437291 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-07-08T11:54:30.273922Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:419} outSectorIdx >= chunkSizeUsableSectors PDiskId# 1 OutSectorIdx# 1056832 OutLastSectorIdx# 1056832 ChunkSizeUsableSectors# 1536 Offset# 4294967295 Size# 128 2025-07-08T11:54:30.273949Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:922} SendChunkReadErrorPDiskId# 1 invalid size# 128 and offset# 4294967295 for ownerId# 3 can't read chunkIdx# 2 ReqId# 2560006702 PDiskId# 1 2025-07-08T11:54:30.819588Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:30.819777Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record 
Format# {TDiskFormat Version: 3 DiskSize: 16777216000 bytes (16 GB) Guid: 9653823961270056367 MagicNextLogChunkReference: 9430108301000229711 MagicLogChunk: 10831845269661072045 MagicDataChunk: 9815994071076347782 MagicSysLogChunk: 16018537472391650551 MagicFormatChunk: 17332287817462050952 ChunkSize: 18874368 bytes (18 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975670781438 (2025-07-08T11:54:30.781438Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:30.820834Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:30.821391Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:30.821410Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:30.821680Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:30.919925Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 2086910 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-07-08T11:54:31.378172Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:419} outSectorIdx >= chunkSizeUsableSectors PDiskId# 1 OutSectorIdx# 1056832 OutLastSectorIdx# 1056832 ChunkSizeUsableSectors# 4608 Offset# 4294967295 Size# 128 2025-07-08T11:54:31.378192Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:922} SendChunkReadErrorPDiskId# 1 invalid size# 128 and offset# 4294967295 for ownerId# 3 can't read chunkIdx# 2 ReqId# 2560005934 PDiskId# 1 2025-07-08T11:54:32.369270Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:32.369475Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 8388608000 bytes (8 GB) Guid: 15586861639837807378 MagicNextLogChunkReference: 17893980481351963807 MagicLogChunk: 2927311393181680885 MagicDataChunk: 11094133048943118954 MagicSysLogChunk: 17496061142142810003 MagicFormatChunk: 17332287817462050952 ChunkSize: 10485760 bytes (10 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975672342411 (2025-07-08T11:54:32.342411Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:32.370815Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 
NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:32.371782Z :BS_PDISK N ... nkIdx# 1 SectorIdx# 1 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 4096} PDiskId# 1 2025-07-08T11:54:47.258671Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 4096} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:47.278520Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:47.281066Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1817} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2025-07-08T11:55:02.888306Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:55:02.888592Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 109951162777600 bytes (109951 GB) Guid: 16895731365206500714 MagicNextLogChunkReference: 2827139038360115026 MagicLogChunk: 3668590863321505718 MagicDataChunk: 6000391575361131848 MagicSysLogChunk: 14910244314522049190 MagicFormatChunk: 17332287817462050952 ChunkSize: 543162368 bytes (543 MB) SectorSize: 4096 SysLogSectorCount: 8112 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975686583269 (2025-07-08T11:54:46.583269Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:55:03.057503Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 5202658 NonceLog# 2878178 NonceData# 4245251907} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-07-08T11:55:03.072173Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:711} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2025-07-08T11:55:03.078964Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 97 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 397312} PDiskId# 1 2025-07-08T11:55:03.081208Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 397312} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:55:03.112002Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started 
PDiskId# 1 2025-07-08T11:55:03.706948Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1817} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2025-07-08T11:55:05.164059Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:55:05.164281Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 109951162777600 bytes (109951 GB) Guid: 16895731365206500714 MagicNextLogChunkReference: 2827139038360115026 MagicLogChunk: 3668590863321505718 MagicDataChunk: 6000391575361131848 MagicSysLogChunk: 14910244314522049190 MagicFormatChunk: 17332287817462050952 ChunkSize: 543162368 bytes (543 MB) SectorSize: 4096 SysLogSectorCount: 8112 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975686583269 (2025-07-08T11:54:46.583269Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:55:05.288995Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 7055842 NonceLog# 4472795 NonceData# 4246722971} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-07-08T11:55:05.299021Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:711} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2025-07-08T11:55:06.089535Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 126331 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 517451776} PDiskId# 1 2025-07-08T11:55:06.093220Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 517451776} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:55:06.107694Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:55:06.109300Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1817} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2025-07-08T11:55:06.971132Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:55:06.971408Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 109951162777600 bytes (109951 GB) Guid: 16895731365206500714 MagicNextLogChunkReference: 2827139038360115026 MagicLogChunk: 3668590863321505718 MagicDataChunk: 6000391575361131848 MagicSysLogChunk: 14910244314522049190 MagicFormatChunk: 17332287817462050952 ChunkSize: 543162368 bytes (543 MB) SectorSize: 4096 SysLogSectorCount: 8112 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975686583269 
(2025-07-08T11:54:46.583269Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:55:07.096466Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 9120718 NonceLog# 5574847 NonceData# 4248356885} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-07-08T11:55:07.110293Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:711} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2025-07-08T11:55:07.477246Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 32000 SectorIdx# 119960 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 32000 OffsetInChunk# 491356160} PDiskId# 1 2025-07-08T11:55:07.481014Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 32000 OffsetInChunk# 491356160} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:55:07.496016Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:55:07.497368Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1817} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2025-07-08T11:55:07.591214Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:55:07.591638Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 109951162777600 bytes (109951 GB) Guid: 16895731365206500714 MagicNextLogChunkReference: 2827139038360115026 MagicLogChunk: 3668590863321505718 MagicDataChunk: 6000391575361131848 MagicSysLogChunk: 14910244314522049190 MagicFormatChunk: 17332287817462050952 ChunkSize: 543162368 bytes (543 MB) SectorSize: 4096 SysLogSectorCount: 8112 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975686583269 (2025-07-08T11:54:46.583269Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:55:07.713562Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 10652253 NonceLog# 6736187 NonceData# 4250278659} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-07-08T11:55:07.723520Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:711} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2025-07-08T11:55:09.080477Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} 
PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 32001 SectorIdx# 18915 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 32001 OffsetInChunk# 77475840} PDiskId# 1 2025-07-08T11:55:09.083681Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 32001 OffsetInChunk# 77475840} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:55:09.099468Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:55:09.101597Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1817} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2025-07-08T11:55:09.105327Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:419} outSectorIdx >= chunkSizeUsableSectors PDiskId# 1 OutSectorIdx# 1056832 OutLastSectorIdx# 1056832 ChunkSizeUsableSectors# 132608 Offset# 4294967295 Size# 128 2025-07-08T11:55:09.105338Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:922} SendChunkReadErrorPDiskId# 1 invalid size# 128 and offset# 4294967295 for ownerId# 3 can't read chunkIdx# 32002 ReqId# 2560572462 PDiskId# 1 >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump_ds_init] [GOOD] |69.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/libpy3kqprun_recipe.global.a |69.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/erasure/ut_perf/ydb-core-erasure-ut_perf |69.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/hmac/ut/unittest >> HmacSha::HmacSha1 [GOOD] |69.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/intersection_tree/ut/ydb-library-intersection_tree-ut |69.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |69.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |69.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/base/ut_auth/ydb-core-base-ut_auth >> TPDiskRaces::KillOwnerWhileDeletingChunkWithInflight [GOOD] |69.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |69.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/ut_transform/py3test >> test_transform.py::TestYamlConfigTransformations::test_simplified[dump_ds_init] [GOOD] >> TPDiskRaces::KillOwnerWhileDeletingChunkWithInflightMock |69.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.so >> TErasurePerfTest::Split [GOOD] >> TErasurePerfTest::Restore |69.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/ydb-core-blobstorage-vdisk-hulldb-generic-ut >> IntersectionTree::Randomized [GOOD] |69.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/dq/actors/spilling/ut/ydb-library-yql-dq-actors-spilling-ut |69.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part19/ydb-tests-fq-yt-kqp_yt_file-part19 >> YqlPqSimpleTests::SelectWithNoSchema |69.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/intersection_tree/ut/unittest >> IntersectionTree::Randomized [GOOD] >> Mirror3of4::ReplicationSmall [GOOD] >> 
Mirror3of4::ReplicationHuge |69.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.so >> ydb-tests-stress-kv-tests::import_test [GOOD] >> TBlobStorageHullWriteSst::LogoBlobOneSstOneIndex [GOOD] >> TBlobStorageHullWriteSst::LogoBlobOneSstMultiIndexPartOutbound [GOOD] >> TBlobStorageHullWriteSst::LogoBlobOneSstOneIndexWithSmallWriteBlocks >> TBlobStorageHullSstIt::TestSeekBefore [GOOD] >> TBlobStorageHullWriteSst::LogoBlobMultiSstOneIndex [GOOD] >> TBlobStorageHullWriteSst::LogoBlobMultiSstMultiIndex [GOOD] >> TBlobStorageHullSstIt::TestSeekAfterAndPrev [GOOD] >> YqlPqSimpleTests::SelectWithNoSchema [GOOD] >> YqlPqSimpleTests::SelectWithSchema >> DqSpillingFileTests::Simple >> TBlobStorageHullWriteSst::LogoBlobOneSstOneIndexWithSmallWriteBlocks [GOOD] >> TBlobStorageHullOrderedSstsIt::TestSeekToFirst [GOOD] >> TBlobStorageHullWriteSst::LogoBlobOneSstOneIndexPartOutbound [GOOD] >> TBlobStorageHullOrderedSstsIt::TestSeekToLast [GOOD] >> TBlobStorageHullOrderedSstsIt::TestSeekAfterAndPrev [GOOD] >> DqSpillingFileTests::SingleFilePart [GOOD] >> TBlobStorageHullWriteSst::BlockOneSstOneIndex [GOOD] >> DqSpillingFileTests::Simple [GOOD] >> TBlobStorageHullWriteSst::BlockOneSstMultiIndex >> TBlobStorageHullWriteSst::BlockOneSstMultiIndex [GOOD] >> TBlobStorageHullWriteSst::LogoBlobOneSstMultiIndex [GOOD] >> TBlobStorageHullWriteSst::LogoBlobMultiSstOneIndexPartOutbound >> DqSpillingFileTests::NoSpillingService >> TBsLocalRecovery::ChaoticWriteRestartHugeIncreased [GOOD] |69.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/kv/tests/import_test >> ydb-tests-stress-kv-tests::import_test [GOOD] |69.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::LogoBlobOneSstMultiIndexPartOutbound [GOOD] |69.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::LogoBlobMultiSstMultiIndex [GOOD] >> TBlobStorageHullWriteSst::LogoBlobMultiSstOneIndexPartOutbound [GOOD] >> TBlobStorageHullSstIt::TestSeekToLast [GOOD] >> DqSpillingFileTests::NoSpillingService [GOOD] >> TBlobStorageHullWriteSst::BlockMultiSstOneIndex >> DqSpillingFileTests::Write_FileSizeLimitExceeded [GOOD] >> DqSpillingFileTests::StartError [GOOD] >> TBlobStorageHullSstIt::TestSeekExactAndNext [GOOD] >> TBlobStorageHullSstIt::TestSeekExactAndPrev [GOOD] >> TBlobStorageHullWriteSst::BlockMultiSstOneIndex [GOOD] >> DqSpillingFileTests::FdCounterSingleFile [GOOD] >> DqSpillingFileTests::Write_TotalSizeLimitExceeded [GOOD] >> DqSpillingFileTests::FdCounterMultiFile [GOOD] >> TBlobStorageHullSstIt::TestSeekToFirst [GOOD] >> YqlPqSimpleTests::SelectWithSchema [GOOD] |69.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::LogoBlobOneSstOneIndexPartOutbound [GOOD] |69.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/dq/actors/spilling/ut/unittest >> DqSpillingFileTests::SingleFilePart [GOOD] >> DqSpillingFileTests::MultipleFileParts [GOOD] >> YqlPqSimpleTests::SelectStarWithSchema >> TBlobStorageHullSstIt::TestSeekNotExactBefore [GOOD] >> DqSpillingFileTests::ReadError [GOOD] |68.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullOrderedSstsIt::TestSeekAfterAndPrev [GOOD] >> DqSpillingFileTests::ThreadPoolQueueOverflow |68.9%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/library/yql/dq/actors/spilling/ut/unittest >> DqSpillingFileTests::Simple [GOOD] |68.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullSstIt::TestSeekAfterAndPrev [GOOD] |68.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::BlockOneSstMultiIndex [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/dq/actors/spilling/ut/unittest >> DqSpillingFileTests::StartError [GOOD] Test command err: 2025-07-08T11:55:12.965508Z :KQP_COMPUTE ERROR: (TIoSystemError) (Error 13: Permission denied) util/folder/path.cpp:424: could not create directory /nonexistent 2025-07-08T11:55:12.965616Z :KQP_COMPUTE ERROR: Service is broken, send error to client [1:5:2052] 2025-07-08T11:55:12.965639Z :KQP_COMPUTE ERROR: Service is broken, send error to client [1:5:2052] 2025-07-08T11:55:12.965646Z :KQP_COMPUTE ERROR: Service is broken, send error to client [1:5:2052] |68.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::LogoBlobMultiSstOneIndexPartOutbound [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/dq/actors/spilling/ut/unittest >> DqSpillingFileTests::Write_FileSizeLimitExceeded [GOOD] Test command err: 2025-07-08T11:55:12.994656Z :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:5:2052], blobId: 2, bytes: 50 |68.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/dq/actors/spilling/ut/unittest >> DqSpillingFileTests::NoSpillingService [GOOD] |68.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullSstIt::TestSeekExactAndPrev [GOOD] |68.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/dq/actors/spilling/ut/unittest >> DqSpillingFileTests::FdCounterMultiFile [GOOD] |68.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullWriteSst::BlockMultiSstOneIndex [GOOD] |68.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/dq/actors/spilling/ut/unittest >> DqSpillingFileTests::MultipleFileParts [GOOD] |68.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/hulldb/generic/ut/unittest >> TBlobStorageHullSstIt::TestSeekNotExactBefore [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/dq/actors/spilling/ut/unittest >> DqSpillingFileTests::Write_TotalSizeLimitExceeded [GOOD] Test command err: 2025-07-08T11:55:13.154013Z :KQP_COMPUTE ERROR: [Write] Total size limit exceeded. 
From: [1:5:2052], blobId: 2, bytes: 50 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsLocalRecovery::ChaoticWriteRestartHugeIncreased [GOOD] Test command err: 2025-07-08T11:54:49.975988Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:65:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.975996Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:895:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.975998Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:585:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976000Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:920:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976001Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:506:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976003Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:521:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976005Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:958:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976006Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:541:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976008Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:239:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976012Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:472:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976110Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:225:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976113Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:89:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976114Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:725:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976115Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:137:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976115Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:852:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976116Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:862:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976117Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:609:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976122Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:370:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976123Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:210:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976124Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:682:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976190Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:706:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976192Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:900:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976193Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: 
TEvVMultiPut has huge blob# [5000:1:915:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976195Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:376:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976196Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:939:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976198Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:448:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976199Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:191:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976200Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:711:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976202Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:98:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976203Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:45:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976259Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:108:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976260Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:672:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976263Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:21:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976264Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:36:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976265Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:924:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976266Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:162:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976267Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:619:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976267Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:618:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976268Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:531:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976269Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:322:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976348Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:274:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976352Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:512:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976353Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:842:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976355Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:205:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976357Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:551:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976358Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:823:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976360Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: 
TEvVMultiPut has huge blob# [5000:1:337:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976362Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:409:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976363Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:1:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976365Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:361:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976425Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:720:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976426Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:794:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976428Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:847:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976428Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:983:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976429Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:346:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976430Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:244:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976431Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:439:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976432Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:118:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976433Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:750:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976434Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:133:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976484Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:103:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976486Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:861:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976488Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:653:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976489Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:195:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976491Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:973:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976493Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:764:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976495Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:390:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976496Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:857:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976498Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:176:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976500Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:696:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976544Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: 
TEvVMultiPut has huge blob# [5000:1:419:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976545Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:681:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976546Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:113:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976547Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:905:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976547Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:371:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976549Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:278:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976550Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:667:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976551Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:784:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976552Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:31:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976552Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:580:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976629Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:929:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976634Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:234:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976636Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:74:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976637Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:511:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976639Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:584:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976640Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:157:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976641Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:779:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976643Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:652:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976645Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:482:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976647Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:516:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976712Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:404:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976715Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:997:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976716Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:64:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976718Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:871:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976720Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: 
TEvVMultiPut has huge blob# [5000:1:963:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976722Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:200:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976724Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:380:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976726Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:546:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976729Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:229:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976731Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:774:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976800Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:161:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976802Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:648:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976806Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:424:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976807Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:171:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976808Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:434:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976809Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:463:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976810Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:6:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976811Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:30:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976813Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:613:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976814Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:492:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976877Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:502:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976878Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:99:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976880Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:264:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976882Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:341:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976883Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:308:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976885Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:438:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976886Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:16:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976888Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:579:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976889Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: 
TEvVMultiPut has huge blob# [5000:1:147:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976891Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:891:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976973Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:837:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976975Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:614:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976977Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:298:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976978Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:832:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976980Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:701:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976983Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:954:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976984Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:716:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976986Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:866:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976988Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:69:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.976989Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:890:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.977037Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:594:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.977039Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:395:0:0:66560:1] Marker# BSVS08 2025-07-08T11:54:49.977041Z :BS_VDISK_PUT CRIT: VDISK[0:_:0:0:0]: TEvVMultiPut: TEvVMultiPut has huge blob# [5000:1:949:0:0:66560:1] Marker# BSVS08 >> YqlPqSimpleTests::SelectStarWithSchema [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/dq/actors/spilling/ut/unittest >> DqSpillingFileTests::ReadError [GOOD] Test command err: 2025-07-08T11:55:13.188105Z :KQP_COMPUTE ERROR: [Read async] file: /home/runner/.ya/build/build_root/43nv/001b19/ydb/library/yql/dq/actors/spilling/ut/test-results/unittest/testing_out_stuff/chunk3/dq_spilling_158072/node_1_95580b58-1b7feb4b-b1abc187-75af9119/1_test_0, blobId: 0, offset: 0, error: (Error 2: No such file or directory) util/system/file.cpp:936: can't open "/home/runner/.ya/build/build_root/43nv/001b19/ydb/library/yql/dq/actors/spilling/ut/test-results/unittest/testing_out_stuff/chunk3/dq_spilling_158072/node_1_95580b58-1b7feb4b-b1abc187-75af9119/1_test_0" with mode RdOnly (0x00000008) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/dq/actors/spilling/ut/unittest >> DqSpillingFileTests::ThreadPoolQueueOverflow Test command err: 2025-07-08T11:55:13.295608Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.295659Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.295679Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.295692Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.295705Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.295716Z :KQP_COMPUTE 
ERROR: [Write] Can not run operation 2025-07-08T11:55:13.295727Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.295739Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.295751Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.295764Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.295778Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.295790Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.295802Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.295814Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.295827Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.295968Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296001Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296016Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296032Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296047Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296064Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296078Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296092Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296108Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296125Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296141Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296158Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296176Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296189Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296208Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296220Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296237Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296265Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296283Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296302Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296326Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296339Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296358Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296377Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296395Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296414Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296433Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296452Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296471Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296490Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296510Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296529Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296553Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296568Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296597Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296614Z :KQP_COMPUTE ERROR: [Write] Can not run operation 
2025-07-08T11:55:13.296636Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296645Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296670Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296693Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296718Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296729Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296797Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296824Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296854Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296873Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296908Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296921Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.296973Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.297004Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.297030Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.297067Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.297079Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.297103Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.297129Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.297167Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.297196Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.297209Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.297237Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.297274Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.297299Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.297318Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.297359Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.297387Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.297407Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.297423Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.297470Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.297499Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.297512Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.297559Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.297580Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.297612Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.297624Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.297656Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.297698Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.297710Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.297742Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.297787Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.297822Z :KQP_COMPUTE ERROR: [Write] Can not run operation 2025-07-08T11:55:13.297846Z :KQP_COMPUTE ERROR: [Write] Can not run operation [Write] Can not run operation >> TBtreeIndexTPartLarge::BigKeys1GB [GOOD] >> TBtreeIndexTPartLarge::CutKeys |68.7%| [LD] 
{BAZEL_UPLOAD} $(B)/ydb/library/actors/interconnect/ut_huge_cluster/ydb-library-actors-interconnect-ut_huge_cluster |68.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/resource_pools/ut/ydb-core-resource_pools-ut |68.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/local_ydb/libpy3local_ydb.global.a ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/yql/providers/pq/provider/ut/unittest >> YqlPqSimpleTests::SelectStarWithSchema [GOOD] Test command err: 2025-07-08 11:55:11.718 INFO ydb-library-yql-providers-pq-provider-ut(pid=156151, tid=0x00007FB8C3FFCA80) [DQ] interconnect_helpers.cpp:215: Start listener ::1:31337 socket: 7 2025-07-08 11:55:11.780 INFO ydb-library-yql-providers-pq-provider-ut(pid=156151, tid=0x00007FB8C3FFCA80) [DQ] service_node.cpp:126: Starting GRPC on 31338 GRpc memory quota was set but disabled due to issues with grpc quoter, to enable it use EnableGRpcMemoryQuota option 2025-07-08 11:55:11.787 INFO ydb-library-yql-providers-pq-provider-ut(pid=156151, tid=0x00007FB8C3FFCA80) [default] storage.cpp:178: FileStorage initialized in "/home/runner/.ya/build/build_root/43nv/000b45/r3tmp/tmpPxlI6J/", temporary dir: "/home/runner/.ya/build/build_root/43nv/000b45/r3tmp/tmpPxlI6J/156151", files: 0, total size: 0 Parse SQL... ( (import aggregate_module '"/lib/yql/aggregate.yqls") (import window_module '"/lib/yql/window.yqls") (import core_module '"/lib/yql/core.yqls") (let world (Configure! world (DataSource '"config") '"DqEngine" '"force")) (let world (Configure! world (DataSource '"pq" '"$all") '"Attr" '"consumer" '"my_test_consumer")) (let world (block '( (let x (Read! world (DataSource '"pq" '"lb") (MrTableConcat (Key '('table (String '"my_in_topic")))) (Void) '())) (let world (Left! x)) (let table1 (Right! x)) (let values (block '( (let select (block '( (let core table1) (let core (Filter core (lambda '(row) (Coalesce ("<" (Member row '"Data") (String '"100")) (Bool 'false))))) (let core (PersistableRepr (block '( (let projectCoreType (TypeOf core)) (let core (SqlProject core '((SqlProjectItem projectCoreType '"Data" (lambda '(row) (block '( (let res (Member row '"Data")) (return res) ))))))) (return core) )))) (return core) ))) (return select) ))) (let world (block '( (let sink (DataSink '"pq" '"lb")) (let world (Write! world sink (Key '('table (String '"my_out_topic"))) values '('('mode 'append)))) (return world) ))) (return world) ))) (let world (block '( (let world (CommitAll! world)) (return world) ))) (return world) ) Compile... Optimize... 
2025-07-08 11:55:12.311 INFO ydb-library-yql-providers-pq-provider-ut(pid=156151, tid=0x00007FB8C3FFCA80) [DQ] yql_dq_gateway.cpp:599: {351fa7a-662f53c0-d478d395-1bdf3a9f} OpenSession 2025-07-08 11:55:12.312 INFO ydb-library-yql-providers-pq-provider-ut(pid=156151, tid=0x00007FB8C3FFCA80) [PQ] yql_pq_load_meta.cpp:35: {351fa7a-662f53c0-d478d395-1bdf3a9f} Load topic meta for: `lb`.`my_out_topic` 2025-07-08 11:55:12.312 INFO ydb-library-yql-providers-pq-provider-ut(pid=156151, tid=0x00007FB8C3FFCA80) [PQ] yql_pq_load_meta.cpp:35: {351fa7a-662f53c0-d478d395-1bdf3a9f} Load topic meta for: `lb`.`my_in_topic` 2025-07-08 11:55:12.312 INFO ydb-library-yql-providers-pq-provider-ut(pid=156151, tid=0x00007FB8C3FFCA80) [PQ] yql_pq_datasource.cpp:88: {351fa7a-662f53c0-d478d395-1bdf3a9f} RewriteIO 2025-07-08 11:55:12.312 INFO ydb-library-yql-providers-pq-provider-ut(pid=156151, tid=0x00007FB8C3FFCA80) [PQ] yql_pq_datasource.cpp:88: {351fa7a-662f53c0-d478d395-1bdf3a9f} RewriteIO 2025-07-08 11:55:12.312 INFO ydb-library-yql-providers-pq-provider-ut(pid=156151, tid=0x00007FB8C3FFCA80) [PQ] yql_pq_datasink.cpp:102: {351fa7a-662f53c0-d478d395-1bdf3a9f} Rewrite Write! ( (let $1 (Configure! world (DataSource '"config") '"DqEngine" '"force")) (let $2 (Configure! $1 (DataSource '"pq" '"$all") '"Attr" '"consumer" '"my_test_consumer")) (let $3 (DataSink '"pq" '"lb")) (let $4 '('('"PartitionsCount" '"1"))) (let $5 (DataType 'String)) (let $6 '('"Data" $5)) (let $7 (StructType $6)) (let $8 (PqTopic '"lb" '"/Root" '"my_out_topic" $4 '() $7)) (let $9 '"_yql_sys_create_time") (let $10 '"_yql_sys_tsp_write_time") (let $11 '"_yql_sys_partition_id") (let $12 '"_yql_sys_message_group_id") (let $13 '('('"system" $9) '('"system" $10) '('"system" $11) '('"system" '"_yql_sys_offset") '('"system" $12) '('"system" '"_yql_sys_seq_no"))) (let $14 (PqTopic '"lb" '"/Root" '"my_in_topic" $4 $13 $7)) (let $15 (DataType 'Timestamp)) (let $16 (DataType 'Uint64)) (let $17 (ListType (StructType $6 '($9 $15) '($12 $5) '('"_yql_sys_offset" $16) '($11 $16) '('"_yql_sys_seq_no" $16) '($10 $15)))) (let $18 (SqlProjectItem $17 '"Data" (lambda '($22) (Member $22 '"Data")))) (let $19 (SqlProjectItem $17 $10 (lambda '($23) (Member $23 $10)))) (let $20 (PqWriteTopic! $2 $3 $8 (RemovePrefixMembers (SqlProject (Filter (Right! (PqReadTopic! $2 (DataSource '"pq" '"lb") $14 (Void) '"raw" '"" (Void) '('('"data.datetime.formatname" '"POSIX") '('"data.timestamp.formatname" '"POSIX")))) (lambda '($21) (< (Member $21 '"Data") (String '"100")))) '($18 $19)) '('_yql_)) 'append '())) (return (Commit! $20 $3)) ) 2025-07-08 11:55:12.315 DEBUG ydb-library-yql-providers-pq-provider-ut(pid=156151, tid=0x00007FB8C3FFCA80) [core] yql_co_simple1.cpp:4013: {351fa7a-662f53c0-d478d395-1bdf3a9f} Canonize Filter ( (let $1 (Configure! world (DataSource '"config") '"DqEngine" '"force")) (let $2 (Configure! 
$1 (DataSource '"pq" '"$all") '"Attr" '"consumer" '"my_test_consumer")) (let $3 (DataSink '"pq" '"lb")) (let $4 '('('"PartitionsCount" '"1"))) (let $5 (DataType 'String)) (let $6 '('"Data" $5)) (let $7 (StructType $6)) (let $8 (PqTopic '"lb" '"/Root" '"my_out_topic" $4 '() $7)) (let $9 '"_yql_sys_create_time") (let $10 '"_yql_sys_tsp_write_time") (let $11 '"_yql_sys_partition_id") (let $12 '"_yql_sys_message_group_id") (let $13 '('('"system" $9) '('"system" $10) '('"system" $11) '('"system" '"_yql_sys_offset") '('"system" $12) '('"system" '"_yql_sys_seq_no"))) (let $14 (PqTopic '"lb" '"/Root" '"my_in_topic" $4 $13 $7)) (let $15 (DataType 'Timestamp)) (let $16 (DataType 'Uint64)) (let $17 (ListType (StructType $6 '($9 $15) '($12 $5) '('"_yql_sys_offset" $16) '($11 $16) '('"_yql_sys_seq_no" $16) '($10 $15)))) (let $18 (SqlProjectItem $17 '"Data" (lambda '($22) (Member $22 '"Data")))) (let $19 (SqlProjectItem $17 $10 (lambda '($23) (Member $23 $10)))) (let $20 (PqWriteTopic! $2 $3 $8 (RemovePrefixMembers (SqlProject (FlatMap (Right! (PqReadTopic! $2 (DataSource '"pq" '"lb") $14 (Void) '"raw" '"" (Void) '('('"data.datetime.formatname" '"POSIX") '('"data.timestamp.formatname" '"POSIX")))) (lambda '($21) (OptionalIf (< (Member $21 '"Data") (String '"100")) $21))) '($18 $19)) '('_yql_)) 'append '())) (return (Commit! $20 $3)) ) 2025-07-08 11:55:12.316 DEBUG ydb-library-yql-providers-pq-provider-ut(pid=156151, tid=0x00007FB8C3FFCA80) [core] yql_co_simple1.cpp:6429: {351fa7a-662f53c0-d478d395-1bdf3a9f} Expand SqlProject ( (let $1 (Configure! world (DataSource '"config") '"DqEngine" '"force")) (let $2 (Configure! $1 (DataSource '"pq" '"$all") '"Attr" '"consumer" '"my_test_consumer")) (let $3 (DataSink '"pq" '"lb")) (let $4 '('('"PartitionsCount" '"1"))) (let $5 (StructType '('"Data" (DataType 'String)))) (let $6 (PqTopic '"lb" '"/Root" '"my_out_topic" $4 '() $5)) (let $7 '"_yql_sys_tsp_write_time") (let $8 '('('"system" '"_yql_sys_create_time") '('"system" $7) '('"system" '"_yql_sys_partition_id") '('"system" '"_yql_sys_offset") '('"system" '"_yql_sys_message_group_id") '('"system" '"_yql_sys_seq_no"))) (let $9 (PqTopic '"lb" '"/Root" '"my_in_topic" $4 $8 $5)) (let $10 (PqWriteTopic! $2 $3 $6 (RemovePrefixMembers (FlatMap (FlatMap (Right! (PqReadTopic! $2 (DataSource '"pq" '"lb") $9 (Void) '"raw" '"" (Void) '('('"data.datetime.formatname" '"POSIX") '('"data.timestamp.formatname" '"POSIX")))) (lambda '($11) (OptionalIf (< (Member $11 '"Data") (String '"100")) $11))) (lambda '($12) (AsList (FlattenMembers '('"" (AsStruct '('"Data" (Member $12 '"Data")))) '('"" (AsStruct '($7 (Member $12 $7)))))))) '('_yql_)) 'append '())) (return (Commit! $10 $3)) ) 2025-07-08 11:55:12.317 DEBUG ydb-library-yql-providers-pq-provider-ut(pid=156151, tid=0x00007FB8C3FFCA80) [core] yql_opt_utils.cpp:764: {351fa7a-662f53c0-d478d395-1bdf3a9f} Enumerate struct literal for FlattenMembers 2025-07-08 11:55:12.317 DEBUG ydb-library-yql-providers-pq-provider-ut(pid=156151, tid=0x00007FB8C3FFCA80) [core] yql_opt_utils.cpp:764: {351fa7a-662f53c0-d478d395-1bdf3a9f} Enumerate struct literal for FlattenMembers ( (let $1 (Configure! world (DataSource '"config") '"DqEngine" '"force")) (let $2 (Configure! 
$1 (DataSource '"pq" '"$all") '"Attr" '"consumer" '"my_test_consumer")) (let $3 (DataSink '"pq" '"lb")) (let $4 '('('"PartitionsCount" '"1"))) (let $5 (StructType '('"Data" (DataType 'String)))) (let $6 (PqTopic '"lb" '"/Root" '"my_out_topic" $4 '() $5)) (let $7 '"_yql_sys_tsp_write_time") (let $8 '('('"system" '"_yql_sys_create_time") '('"system" $7) '('"system" '"_yql_sys_partition_id") '('"system" '"_yql_sys_offset") '('"system" '"_yql_sys_message_group_id") '('"system" '"_yql_sys_seq_no"))) (let $9 (PqTopic '"lb" '"/Root" '"my_in_topic" $4 $8 $5)) (let $10 (PqWriteTopic! $2 $3 $6 (RemovePrefixMembers (FlatMap (FlatMap (Right! (PqReadTopic! $2 (DataSource '"pq" '"lb") $9 (Void) '"raw" '"" (Void) '('('"data.datetime.formatname" '"POSIX") '('"data.timestamp.formatname" '"POSIX")))) (lambda '($11) (OptionalIf (< (Member $11 '"Data") (String '"100")) $11))) (lambda '($12) (AsList (AsStruct '('"Data" (Member $12 '"Data")) '($7 (Member $12 $7)))))) '('_yql_)) 'append '())) (return (Commit! $10 $3)) ) 2025-07-08 11:55:12.317 DEBUG ydb-library-yql-providers-pq-provider-ut(pid=156151, tid=0x00007FB8C3FFCA80) [core] yql_co_simple1.cpp:2019: {351fa7a-662f53c0-d478d395-1bdf3a9f} FlatMap with single arg AsList ( (let $1 (Configure! world (DataSource '"config") '"DqEngine" '"force")) (let $2 (Configure! $1 (DataSource '"pq" '"$all") '"Attr" '"consumer" '"my_test_consumer")) (let $3 (DataSink '"pq" '"lb")) (let $4 '('('"PartitionsCount" '"1"))) (let $5 (StructType '('"Data" (DataType 'String)))) (let $6 (PqTopic '"lb" '"/Root" '"my_out_topic" $4 '() $5)) (let $7 '"_yql_sys_tsp_write_time") (let $8 '('('"system" '"_yql_sys_create_time") '('"system" $7) '('"system" '"_yql_sys_partition_id") '('"system" '"_yql_sys_offset") '('"system" '"_yql_sys_message_group_id") '('"system" '"_yql_sys_seq_no"))) (let $9 (PqTopic '"lb" '"/Root" '"my_in_topic" $4 $8 $5)) (let $10 (PqWriteTopic! $2 $3 $6 (RemovePrefixMembers (FlatMap (FlatMap (Right! (PqReadTopic! $2 (DataSource '"pq" '"lb") $9 (Void) '"raw" '"" (Void) '('('"data.datetime.formatname" '"POSIX") '('"data.timestamp.formatname" '"POSIX")))) (lambda '($11) (OptionalIf (< (Member $11 '"Data") (String '"100")) $11))) (lambda '($12) (Just (AsStruct '('"Data" (Member $12 '"Data")) '($7 (Member $12 $7)))))) '('_yql_)) 'append '())) (return (Commit! $10 $3)) ) 2025-07-08 11:55:12.318 DEBUG ydb-library-yql-providers-pq-provider-ut(pid=156151, tid=0x00007FB8C3FFCA80) [core] yql_co_simple1.cpp:2074: {351fa7a-662f53c0-d478d395-1bdf3a9f} FlatMap to ExtractMembers ( (let $1 (Configure! world (DataSource '"config") '"DqEngine" '"force")) (let $2 (Configure! $1 (DataSource '"pq" '"$all") '"Attr" '"consumer" '"my_test_consumer")) (let $3 (DataSink '"pq" '"lb")) (let $4 '('('"PartitionsCount" '"1"))) (let $5 (StructType '('"Data" (DataType 'String)))) (let $6 (PqTopic '"lb" '"/Root" '"my_out_topic" $4 '() $5)) (let $7 '"_yql_sys_tsp_write_time") (let $8 '('('"system" '"_yql_sys_create_time") '('"system" $7) '('"system" '"_yql_sys_partition_id") '('"system" '"_yql_sys_offset") '('"system" '"_yql_sys_message_group_id") '('"system" '"_yql_sys_seq_no"))) (let $9 (PqTopic '"lb" '"/Root" '"my_in_topic" $4 $8 $5)) (let $10 (PqWriteTopic! $2 $3 $6 (RemovePrefixMembers (ExtractMembers (FlatMap (Right! (PqReadTopic! $2 (DataSource '"pq" '"lb") $9 (Void) '"raw" '"" (Void) '('('"data.datetime.formatname" '"POSIX") '('"data.timestamp.formatname" '"POSIX")))) (lambda '($11) (OptionalIf (< (Member $11 '"Data") (String '"100")) $11))) '('"Data" $7)) '('_yql_)) 'append '())) (return (Commit! 
$10 $3)) ) 2025-07-08 11:55:12.319 DEBUG ydb-library-yql-providers-pq-provider-ut(pid=156151, tid=0x00007FB8C3FFCA80) [core] yql_opt_utils.cpp:843: {351fa7a-662f53c0-d478d395 ... gine" '"force")) (let $2 (Configure! $1 (DataSource '"pq" '"$all") '"Attr" '"consumer" '"my_test_consumer")) (let $3 (DataSink '"pq" '"lb")) (let $4 '('('"PartitionsCount" '"1"))) (let $5 (DataType 'String)) (let $6 (PqTopic '"lb" '"/Root" '"my_out_topic" $4 '() (StructType '('"Data" $5)))) (let $7 '('('"system" '"_yql_sys_create_time") '('"system" '"_yql_sys_tsp_write_time") '('"system" '"_yql_sys_partition_id") '('"system" '"_yql_sys_offset") '('"system" '"_yql_sys_message_group_id") '('"system" '"_yql_sys_seq_no"))) (let $8 (PqTopic '"lb" '"/Root" '"my_in_topic" $4 $7 (StructType '('"x" $5) '('"y" (DataType 'Int32))))) (let $9 (PqWriteTopic! $2 $3 $6 (FlatMap (ExtractMembers (Right! (PqReadTopic! $2 (DataSource '"pq" '"lb") $8 '('"y" '"x") '"json" '"" (Void) '('('"data.datetime.formatname" '"POSIX") '('"data.timestamp.formatname" '"POSIX")))) '('"x" '"y")) (lambda '($10) (AsList (FlattenMembers '('"" (AsStruct '('"x" (Member $10 '"x")))))))) 'append '())) (return (Commit! $9 $3)) ) 2025-07-08 11:55:13.452 DEBUG ydb-library-yql-providers-pq-provider-ut(pid=156151, tid=0x00007FB8C3FFCA80) [core] yql_opt_utils.cpp:764: {32c70b70-c1c03f80-2d51032c-51bf76ab} Enumerate struct literal for FlattenMembers ( (let $1 (Configure! world (DataSource '"config") '"DqEngine" '"force")) (let $2 (Configure! $1 (DataSource '"pq" '"$all") '"Attr" '"consumer" '"my_test_consumer")) (let $3 (DataSink '"pq" '"lb")) (let $4 '('('"PartitionsCount" '"1"))) (let $5 (DataType 'String)) (let $6 (PqTopic '"lb" '"/Root" '"my_out_topic" $4 '() (StructType '('"Data" $5)))) (let $7 '('('"system" '"_yql_sys_create_time") '('"system" '"_yql_sys_tsp_write_time") '('"system" '"_yql_sys_partition_id") '('"system" '"_yql_sys_offset") '('"system" '"_yql_sys_message_group_id") '('"system" '"_yql_sys_seq_no"))) (let $8 (PqTopic '"lb" '"/Root" '"my_in_topic" $4 $7 (StructType '('"x" $5) '('"y" (DataType 'Int32))))) (let $9 (PqWriteTopic! $2 $3 $6 (FlatMap (ExtractMembers (Right! (PqReadTopic! $2 (DataSource '"pq" '"lb") $8 '('"y" '"x") '"json" '"" (Void) '('('"data.datetime.formatname" '"POSIX") '('"data.timestamp.formatname" '"POSIX")))) '('"x" '"y")) (lambda '($10) (AsList (AsStruct '('"x" (Member $10 '"x")))))) 'append '())) (return (Commit! $9 $3)) ) 2025-07-08 11:55:13.452 DEBUG ydb-library-yql-providers-pq-provider-ut(pid=156151, tid=0x00007FB8C3FFCA80) [core] yql_co_simple1.cpp:2019: {32c70b70-c1c03f80-2d51032c-51bf76ab} FlatMap with single arg AsList ( (let $1 (Configure! world (DataSource '"config") '"DqEngine" '"force")) (let $2 (Configure! $1 (DataSource '"pq" '"$all") '"Attr" '"consumer" '"my_test_consumer")) (let $3 (DataSink '"pq" '"lb")) (let $4 '('('"PartitionsCount" '"1"))) (let $5 (DataType 'String)) (let $6 (PqTopic '"lb" '"/Root" '"my_out_topic" $4 '() (StructType '('"Data" $5)))) (let $7 '('('"system" '"_yql_sys_create_time") '('"system" '"_yql_sys_tsp_write_time") '('"system" '"_yql_sys_partition_id") '('"system" '"_yql_sys_offset") '('"system" '"_yql_sys_message_group_id") '('"system" '"_yql_sys_seq_no"))) (let $8 (PqTopic '"lb" '"/Root" '"my_in_topic" $4 $7 (StructType '('"x" $5) '('"y" (DataType 'Int32))))) (let $9 (PqWriteTopic! $2 $3 $6 (FlatMap (ExtractMembers (Right! (PqReadTopic! 
$2 (DataSource '"pq" '"lb") $8 '('"y" '"x") '"json" '"" (Void) '('('"data.datetime.formatname" '"POSIX") '('"data.timestamp.formatname" '"POSIX")))) '('"x" '"y")) (lambda '($10) (Just (AsStruct '('"x" (Member $10 '"x")))))) 'append '())) (return (Commit! $9 $3)) ) 2025-07-08 11:55:13.453 DEBUG ydb-library-yql-providers-pq-provider-ut(pid=156151, tid=0x00007FB8C3FFCA80) [core] yql_co_simple1.cpp:2074: {32c70b70-c1c03f80-2d51032c-51bf76ab} FlatMap to ExtractMembers ( (let $1 (Configure! world (DataSource '"config") '"DqEngine" '"force")) (let $2 (Configure! $1 (DataSource '"pq" '"$all") '"Attr" '"consumer" '"my_test_consumer")) (let $3 (DataSink '"pq" '"lb")) (let $4 '('('"PartitionsCount" '"1"))) (let $5 (DataType 'String)) (let $6 (PqTopic '"lb" '"/Root" '"my_out_topic" $4 '() (StructType '('"Data" $5)))) (let $7 '('('"system" '"_yql_sys_create_time") '('"system" '"_yql_sys_tsp_write_time") '('"system" '"_yql_sys_partition_id") '('"system" '"_yql_sys_offset") '('"system" '"_yql_sys_message_group_id") '('"system" '"_yql_sys_seq_no"))) (let $8 (PqTopic '"lb" '"/Root" '"my_in_topic" $4 $7 (StructType '('"x" $5) '('"y" (DataType 'Int32))))) (let $9 (PqWriteTopic! $2 $3 $6 (ExtractMembers (ExtractMembers (Right! (PqReadTopic! $2 (DataSource '"pq" '"lb") $8 '('"y" '"x") '"json" '"" (Void) '('('"data.datetime.formatname" '"POSIX") '('"data.timestamp.formatname" '"POSIX")))) '('"x" '"y")) '('"x")) 'append '())) (return (Commit! $9 $3)) ) 2025-07-08 11:55:13.454 DEBUG ydb-library-yql-providers-pq-provider-ut(pid=156151, tid=0x00007FB8C3FFCA80) [core] yql_co_simple1.cpp:4054: {32c70b70-c1c03f80-2d51032c-51bf76ab} ExtractMembers over ExtractMembers ( (let $1 (Configure! world (DataSource '"config") '"DqEngine" '"force")) (let $2 (Configure! $1 (DataSource '"pq" '"$all") '"Attr" '"consumer" '"my_test_consumer")) (let $3 (DataSink '"pq" '"lb")) (let $4 '('('"PartitionsCount" '"1"))) (let $5 (DataType 'String)) (let $6 (PqTopic '"lb" '"/Root" '"my_out_topic" $4 '() (StructType '('"Data" $5)))) (let $7 '('('"system" '"_yql_sys_create_time") '('"system" '"_yql_sys_tsp_write_time") '('"system" '"_yql_sys_partition_id") '('"system" '"_yql_sys_offset") '('"system" '"_yql_sys_message_group_id") '('"system" '"_yql_sys_seq_no"))) (let $8 (PqTopic '"lb" '"/Root" '"my_in_topic" $4 $7 (StructType '('"x" $5) '('"y" (DataType 'Int32))))) (let $9 (PqWriteTopic! $2 $3 $6 (ExtractMembers (Right! (PqReadTopic! $2 (DataSource '"pq" '"lb") $8 '('"y" '"x") '"json" '"" (Void) '('('"data.datetime.formatname" '"POSIX") '('"data.timestamp.formatname" '"POSIX")))) '('"x")) 'append '())) (return (Commit! $9 $3)) ) 2025-07-08 11:55:13.454 INFO ydb-library-yql-providers-pq-provider-ut(pid=156151, tid=0x00007FB8C3FFCA80) [DQ] yql_dq_recapture.cpp:108: {32c70b70-c1c03f80-2d51032c-51bf76ab} DqsRecapture ( (let $1 (Configure! world (DataSource '"config") '"DqEngine" '"force")) (let $2 (Configure! 
$1 (DataSource '"pq" '"$all") '"Attr" '"consumer" '"my_test_consumer")) (let $3 (DataSink '"pq" '"lb")) (let $4 '('('"PartitionsCount" '"1"))) (let $5 (DataType 'String)) (let $6 (PqTopic '"lb" '"/Root" '"my_out_topic" $4 '() (StructType '('"Data" $5)))) (let $7 '"_yql_sys_create_time") (let $8 '"_yql_sys_tsp_write_time") (let $9 '"_yql_sys_partition_id") (let $10 '"_yql_sys_message_group_id") (let $11 '('('"system" $7) '('"system" $8) '('"system" $9) '('"system" '"_yql_sys_offset") '('"system" $10) '('"system" '"_yql_sys_seq_no"))) (let $12 '('"x" $5)) (let $13 '('"y" (DataType 'Int32))) (let $14 (PqTopic '"lb" '"/Root" '"my_in_topic" $4 $11 (StructType $12 $13))) (let $15 '('"SharedReading" '"0")) (let $16 '('('"Consumer" '"my_test_consumer") '('"Endpoint" '"lb.ru") $15 '('"ReconnectPeriod" '"") '('"Format" '"json") '('"ReadGroup" '""))) (let $17 (DataType 'Timestamp)) (let $18 (DataType 'Uint64)) (let $19 (StructType '($7 $17) '($10 $5) '('"_yql_sys_offset" $18) '($9 $18) '('"_yql_sys_seq_no" $18) '($8 $17) $12 $13)) (let $20 (DqPqTopicSource $2 $14 '('"x" '"y") $16 (SecureParam '"cluster:default_lb") '"" $19)) (let $21 '($7 $8 $9 '"_yql_sys_offset" $10 '"_yql_sys_seq_no")) (let $22 '('('"format" '"json") '('"metadataColumns" $21) '('"formatSettings" '('('"data.datetime.formatname" '"POSIX") '('"data.timestamp.formatname" '"POSIX"))) '('"settings" '($15)))) (let $23 (DqSourceWrap $20 (DataSource '"pq" '"lb") $19 $22)) (let $24 (PqWriteTopic! $2 $3 $6 (ExtractMembers $23 '('"x")) 'append '())) (return (Commit! $24 $3)) ) 2025-07-08 11:55:13.456 INFO ydb-library-yql-providers-pq-provider-ut(pid=156151, tid=0x00007FB8C3FFCA80) [PQ] yql_optimize.cpp:135: {32c70b70-c1c03f80-2d51032c-51bf76ab} LogicalOptimizer-ExtractMembersOverDqWrap ( (let $1 (Configure! world (DataSource '"config") '"DqEngine" '"force")) (let $2 (Configure! $1 (DataSource '"pq" '"$all") '"Attr" '"consumer" '"my_test_consumer")) (let $3 (DataSink '"pq" '"lb")) (let $4 '('('"PartitionsCount" '"1"))) (let $5 (DataType 'String)) (let $6 (PqTopic '"lb" '"/Root" '"my_out_topic" $4 '() (StructType '('"Data" $5)))) (let $7 (StructType '('"x" $5))) (let $8 (PqTopic '"lb" '"/Root" '"my_in_topic" $4 '() $7)) (let $9 '('"SharedReading" '"0")) (let $10 '('('"Consumer" '"my_test_consumer") '('"Endpoint" '"lb.ru") $9 '('"ReconnectPeriod" '"") '('"Format" '"json") '('"ReadGroup" '""))) (let $11 (DqPqTopicSource $2 $8 '('"x") $10 (SecureParam '"cluster:default_lb") '"" $7)) (let $12 '('('"format" '"json") '('"formatSettings" '('('"data.datetime.formatname" '"POSIX") '('"data.timestamp.formatname" '"POSIX"))) '('"settings" '($9)))) (let $13 (DqSourceWrap $11 (DataSource '"pq" '"lb") $7 $12)) (let $14 (PqWriteTopic! $2 $3 $6 $13 'append '())) (return (Commit! $14 $3)) ) 2025-07-08 11:55:13.457 INFO ydb-library-yql-providers-pq-provider-ut(pid=156151, tid=0x00007FB8C3FFCA80) [DQ] yql_optimize.cpp:135: {32c70b70-c1c03f80-2d51032c-51bf76ab} DqsPhy-BuildStageWithSourceWrap ( (let $1 (Configure! world (DataSource '"config") '"DqEngine" '"force")) (let $2 (Configure! 
$1 (DataSource '"pq" '"$all") '"Attr" '"consumer" '"my_test_consumer")) (let $3 (DataSink '"pq" '"lb")) (let $4 '('('"PartitionsCount" '"1"))) (let $5 (DataType 'String)) (let $6 (PqTopic '"lb" '"/Root" '"my_out_topic" $4 '() (StructType '('"Data" $5)))) (let $7 (DataSource '"pq" '"lb")) (let $8 (StructType '('"x" $5))) (let $9 (PqTopic '"lb" '"/Root" '"my_in_topic" $4 '() $8)) (let $10 '('"SharedReading" '"0")) (let $11 '('('"Consumer" '"my_test_consumer") '('"Endpoint" '"lb.ru") $10 '('"ReconnectPeriod" '"") '('"Format" '"json") '('"ReadGroup" '""))) (let $12 (DqPqTopicSource $2 $9 '('"x") $11 (SecureParam '"cluster:default_lb") '"" $8)) (let $13 (DqStage '((DqSource $7 $12)) (lambda '($15) (block '( (let $16 '('('"format" '"json") '('"formatSettings" '('('"data.datetime.formatname" '"POSIX") '('"data.timestamp.formatname" '"POSIX"))) '('"settings" '($10)))) (let $17 (DqSourceWideWrap $15 $7 $8 $16)) (return (NarrowMap $17 (lambda '($18) (AsStruct '('"x" $18))))) ))) '('('"_logical_id" '200280)))) (let $14 (PqWriteTopic! $2 $3 $6 (DqCnUnionAll (TDqOutput $13 '"0")) 'append '())) (return (Commit! $14 $3)) ) 2025-07-08 11:55:13.458 INFO ydb-library-yql-providers-pq-provider-ut(pid=156151, tid=0x00007FB8C3FFCA80) [PQ] yql_pq_physical_optimize.cpp:91: {32c70b70-c1c03f80-2d51032c-51bf76ab} Optimize PqWriteTopic `lb`.`my_out_topic` 2025-07-08 11:55:13.458 INFO ydb-library-yql-providers-pq-provider-ut(pid=156151, tid=0x00007FB8C3FFCA80) [PQ] yql_optimize.cpp:95: {32c70b70-c1c03f80-2d51032c-51bf76ab} PhysicalOptimizer-PqWriteTopic ( (let $1 (Configure! world (DataSource '"config") '"DqEngine" '"force")) (let $2 (Configure! $1 (DataSource '"pq" '"$all") '"Attr" '"consumer" '"my_test_consumer")) (let $3 (DataSource '"pq" '"lb")) (let $4 '('('"PartitionsCount" '"1"))) (let $5 (DataType 'String)) (let $6 (StructType '('"x" $5))) (let $7 (PqTopic '"lb" '"/Root" '"my_in_topic" $4 '() $6)) (let $8 '('"Endpoint" '"lb.ru")) (let $9 '('"SharedReading" '"0")) (let $10 '('('"Consumer" '"my_test_consumer") $8 $9 '('"ReconnectPeriod" '"") '('"Format" '"json") '('"ReadGroup" '""))) (let $11 (SecureParam '"cluster:default_lb")) (let $12 (DqPqTopicSource $2 $7 '('"x") $10 $11 '"" $6)) (let $13 (DataSink '"pq" '"lb")) (let $14 (PqTopic '"lb" '"/Root" '"my_out_topic" $4 '() (StructType '('"Data" $5)))) (let $15 (DqPqTopicSink $14 '($8) $11)) (return (Commit! (DqQuery! $2 '((DqStage '((DqSource $3 $12)) (lambda '($16) (block '( (let $17 '('('"format" '"json") '('"formatSettings" '('('"data.datetime.formatname" '"POSIX") '('"data.timestamp.formatname" '"POSIX"))) '('"settings" '($9)))) (let $18 (DqSourceWideWrap $16 $3 $6 $17)) (return (NarrowMap $18 (lambda '($19) (AsStruct '('"x" $19))))) ))) '('('"_logical_id" '200280)) '((DqSink '"0" $13 $15))))) $13)) ) Done. 
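The YqlPqSimpleTests trace above ends after the optimizer has collapsed the projection chain: "FlatMap to ExtractMembers" followed by "ExtractMembers over ExtractMembers" (yql_co_simple1.cpp) leaves a single ExtractMembers requesting only '"x"' over the topic read before the DQ stage is built. As a rough, purely illustrative sketch of what such a peephole fusion rule does — using a hypothetical toy node type, not the real YQL expression-node API — the C++ snippet below merges two nested projections into one by intersecting the requested columns:

// Illustrative sketch only: a toy "ExtractMembers over ExtractMembers" fusion.
// TNode and FuseExtractMembers are hypothetical and are not the YQL API.
#include <iostream>
#include <memory>
#include <set>
#include <string>
#include <vector>

struct TNode {
    std::string Kind;                              // e.g. "ExtractMembers", "Source"
    std::vector<std::shared_ptr<TNode>> Children;  // first child is the input expression
    std::set<std::string> Members;                 // columns kept by an ExtractMembers node
};

using TNodePtr = std::shared_ptr<TNode>;

// ExtractMembers(ExtractMembers(x, A), B) -> ExtractMembers(x, intersection of A and B):
// projecting twice keeps exactly the columns requested by both levels.
TNodePtr FuseExtractMembers(const TNodePtr& node) {
    if (node->Kind != "ExtractMembers" || node->Children.empty()) {
        return node;
    }
    const TNodePtr& inner = node->Children.front();
    if (inner->Kind != "ExtractMembers" || inner->Children.empty()) {
        return node;
    }
    auto fused = std::make_shared<TNode>();
    fused->Kind = "ExtractMembers";
    fused->Children = inner->Children;             // drop the inner projection node entirely
    for (const auto& column : node->Members) {
        if (inner->Members.count(column)) {
            fused->Members.insert(column);         // keep only columns both levels request
        }
    }
    return fused;
}

int main() {
    auto source = std::make_shared<TNode>(TNode{"Source", {}, {}});
    auto innerProj = std::make_shared<TNode>(TNode{"ExtractMembers", {source}, {"x", "y"}});
    auto outerProj = std::make_shared<TNode>(TNode{"ExtractMembers", {innerProj}, {"x"}});

    TNodePtr fused = FuseExtractMembers(outerProj);
    std::cout << fused->Kind << " over " << fused->Children.front()->Kind << ", members:";
    for (const auto& column : fused->Members) {
        std::cout << ' ' << column;
    }
    std::cout << std::endl;                        // prints: ExtractMembers over Source, members: x
    return 0;
}

This is only a sketch under those assumptions: the real rule operates on annotated expression nodes and must also preserve type annotations and constraints, which the toy version ignores.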
|68.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/validator/ut/validator_builder/yaml_config-validator-ut-validator_builder |68.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut >> ResourcePoolTest::SettingsValidation [GOOD] >> ResourcePoolClassifierTest::SettingsExtracting [GOOD] >> ResourcePoolTest::IntSettingsParsing [GOOD] >> ResourcePoolClassifierTest::IntSettingsParsing [GOOD] >> ResourcePoolClassifierTest::StringSettingsParsing [GOOD] >> ResourcePoolClassifierTest::SettingsValidation [GOOD] >> ResourcePoolTest::PercentSettingsParsing [GOOD] >> ResourcePoolTest::SettingsExtracting [GOOD] >> ResourcePoolTest::SecondsSettingsParsing [GOOD] >> tool::import_test [GOOD] >> ValidatorBuilder::CanHaveDuplicateType [GOOD] |68.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/resource_pools/ut/unittest >> ResourcePoolTest::SecondsSettingsParsing [GOOD] |68.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stability/tool/import_test >> tool::import_test [GOOD] |68.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/vdisk/common/ut/ydb-core-blobstorage-vdisk-common-ut >> ValidatorBuilder::BuildSimpleValidator [GOOD] >> ValidatorBuilder::CanHaveMultipleType [GOOD] >> ValidatorBuilder::CreateMultitypeNode [GOOD] >> ValidatorBuilder::CanCreateAllTypesOfNodes [GOOD] |68.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/replication/ydb-tests-functional-replication >> TBlobStorageHullHugeChain::AllocFreeRestartAllocTest [GOOD] |68.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |68.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/tools/simple_json_diff/libpy3simple_json_diff.global.a >> TChainLayoutBuilder::TestProdConf [GOOD] >> TopTest::Test1 [GOOD] >> TBlobStorageHullHugeChain::AllocFreeAllocTest [GOOD] >> TChainLayoutBuilder::TestMilestoneId [GOOD] >> TBlobStorageHullHugeChain::HeapAllocLargeStandard [GOOD] >> TopTest::Test2 [GOOD] >> TBsLocalRecovery::ChaoticWriteRestart [GOOD] >> TBlobStorageHullHugeChain::HeapAllocSmall [GOOD] >> TBlobStorageHullHugeHeap::WriteRestore [GOOD] >> TBlobStorageHullHugeHeap::RecoveryMode [GOOD] >> TBlobStorageHullHugeHeap::AllocateAllReleaseAll [GOOD] >> TPDiskRaces::DecommitWithInflight [GOOD] >> THugeHeapCtxTests::Basic [GOOD] >> ShredPDisk::SimpleShredRepeat |67.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/transfer/ut/functional/ydb-core-transfer-ut-functional |67.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/validator/ut/validator_builder/unittest >> ValidatorBuilder::CanCreateAllTypesOfNodes [GOOD] |67.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache >> TBlobStorageHullHugeChain::HeapAllocLargeNonStandard [GOOD] >> TBlobStorageHullHugeKeeperPersState::SerializeParse [GOOD] >> TBsLocalRecovery::ChaoticWriteRestartHuge [GOOD] >> TBsLocalRecovery::ChaoticWriteRestartHugeDecreased >> TBlobStorageHullHugeHeap::AllocateAllFromOneChunk [GOOD] >> TBlobStorageHullHugeHeap::BorderValues [GOOD] >> TBlobStorageHullHugeHeap::AllocateAllSerializeDeserializeReleaseAll [GOOD] |67.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TChainLayoutBuilder::TestMilestoneId [GOOD] |67.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp >> ShredPDisk::SimpleShredRepeat [GOOD] >> TVDiskConfigTest::JustConfig [GOOD] |67.7%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TopTest::Test1 [GOOD] |67.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeChain::AllocFreeAllocTest [GOOD] |67.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/ut/ydb-core-blobstorage-crypto-ut >> TVDiskConfigTest::RtmrProblem1 [GOOD] >> TTrackable::TVector [GOOD] >> TCircleBufStringStreamTest::TestNotAligned [GOOD] >> TPDiskErrorStateTests::Basic [GOOD] >> TVDiskConfigTest::Basic [GOOD] >> TVDiskConfigTest::NoMoneyNoHoney [GOOD] >> TVDiskConfigTest::RtmrProblem2 [GOOD] >> ShredPDisk::SimpleShredRepeatAfterPDiskRestart >> TTrackable::TList [GOOD] >> TVDiskConfigTest::ThreeLevels [GOOD] >> TCircleBufStringStreamTest::TestOverflow [GOOD] >> TBlobStorageSyncNeighborsTest::SerDes [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse2Threads >> TTrackable::TString [GOOD] |67.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TopTest::Test2 [GOOD] >> TCircleBufTest::EmptyTest [GOOD] >> TResizableCircleBufTest::Test1 [GOOD] >> TBlobStorageSyncNeighborsTest::CheckRevLookup [GOOD] |67.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeKeeperPersState::SerializeParse [GOOD] >> TCircleBufTest::OverflowTest [GOOD] >> TPDiskErrorStateTests::Basic2 [GOOD] |67.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeHeap::BorderValues [GOOD] >> ShredPDisk::SimpleShredRepeatAfterPDiskRestart [GOOD] >> TPDiskErrorStateTests::BasicErrorReason [GOOD] >> TBlobStorageSyncNeighborsTest::CheckVDiskIterators [GOOD] |67.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeHeap::AllocateAllFromOneChunk [GOOD] >> TResizableCircleBufTest::Test2 [GOOD] >> TBlobStorageSyncNeighborsTest::CheckIsMyDomain [GOOD] >> TCircleBufTest::SimpleTest [GOOD] >> TBlobStorageSyncNeighborsTest::IterateOverAllDisks [GOOD] |67.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeChain::HeapAllocLargeNonStandard [GOOD] >> TTrackable::TBuffer [GOOD] |67.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/huge/ut/unittest >> THugeHeapCtxTests::Basic [GOOD] >> TCircleBufTest::PtrTest [GOOD] >> TLsnAllocTrackerTests::Test1 [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse >> TBlobStorageSyncNeighborsTest::CheckFailDomainsIterators [GOOD] >> TBlobStorageSyncNeighborsTest::CheckVDiskDistance [GOOD] >> TCircleBufStringStreamTest::TestAligned [GOOD] >> FormatTimes::ParseDuration [GOOD] >> TErasurePerfTest::Restore [GOOD] >> TErasureSmallBlobSizePerfTest::StringErasureMode [GOOD] |67.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/huge/ut/unittest >> TBlobStorageHullHugeHeap::AllocateAllSerializeDeserializeReleaseAll [GOOD] |67.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TVDiskConfigTest::NoMoneyNoHoney [GOOD] |67.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TCircleBufTest::OverflowTest [GOOD] |67.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TVDiskConfigTest::ThreeLevels [GOOD] >> TErasureSmallBlobSizePerfTest::ConvertToRopeMode [GOOD] >> StatsFormat::FullStat [GOOD] >> FormatTimes::DurationMs [GOOD] >> 
StatsFormat::AggregateStat [GOOD] |67.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TTrackable::TString [GOOD] |67.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TPDiskErrorStateTests::BasicErrorReason [GOOD] |67.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TTrackable::TBuffer [GOOD] |67.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TBlobStorageSyncNeighborsTest::CheckVDiskDistance [GOOD] >> Config::ExcludeScope [GOOD] >> Config::IncludeScope [GOOD] |67.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TCircleBufStringStreamTest::TestAligned [GOOD] |67.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part16/ydb-tests-fq-yt-kqp_yt_file-part16 |67.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest |67.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest >> FormatTimes::ParseDuration [GOOD] >> FormatTimes::DurationUs [GOOD] |67.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest |67.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest |67.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest >> FormatTimes::DurationMs [GOOD] |67.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest >> StatsFormat::FullStat [GOOD] |67.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut_perf/unittest >> TErasureSmallBlobSizePerfTest::ConvertToRopeMode [GOOD] |67.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest >> StatsFormat::AggregateStat [GOOD] |67.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest >> Config::ExcludeScope [GOOD] >> Mirror3of4::ReplicationHuge [GOOD] >> kqprun_recipe::import_test [GOOD] >> TBlobStorageCrypto::TestOffsetStreamCypher [GOOD] >> TBlobStorageCrypto::UnalignedTestStreamCypher [GOOD] >> TBlobStorageCrypto::TestInplaceStreamCypher [GOOD] >> TBlobStorageCrypto::TestMixedStreamCypher [GOOD] >> TBlobStorageCryptoRope::TestEqualInplaceStreamCypher >> TBlobStorageCrypto::PerfTestStreamCypher [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse [GOOD] |67.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest >> Config::IncludeScope [GOOD] |67.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest >> FormatTimes::DurationUs [GOOD] >> AuthTokenAllowed::PassOnEmptyListAndNoToken [GOOD] >> AuthTokenAllowed::PassOnEmptyListAndInvalidTokenSerialized [GOOD] |67.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/kqprun/recipe/import_test >> kqprun_recipe::import_test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> ShredPDisk::SimpleShredRepeatAfterPDiskRestart [GOOD] Test command err: GREEN 0.5025125628 0 CYAN 0.8623115578 0.862 LIGHT_YELLOW 0.8934673367 0.893 YELLOW 0.9145728643 0.914 LIGHT_ORANGE 0.9306532663 0.93 PRE_ORANGE 0.9467336683 0.946 ORANGE 0.9668341709 0.966 RED 0.9879396985 0.987 BLACK 0.9979899497 0.997 2025-07-08T11:54:37.116824Z node 1 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:37.117372Z node 1 
:BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 14134694535759839504 MagicNextLogChunkReference: 14905784793364052678 MagicLogChunk: 12010384885701528703 MagicDataChunk: 16973861218415498776 MagicSysLogChunk: 8407881880982741394 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975677066409 (2025-07-08T11:54:37.066409Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:37.119859Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:37.125138Z node 1 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:37.125393Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:37.126175Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:37.126496Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1272946 CutLogId# [1:7524678237756148909:2050] ownerRound# 2 PDiskId# 1 2025-07-08T11:54:37.191884Z node 2 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:37.192064Z node 2 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 5182088828766392663 MagicNextLogChunkReference: 12495416684459405823 MagicLogChunk: 10575353868739975951 MagicDataChunk: 4063592454974542850 MagicSysLogChunk: 15908587868488451785 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975677168759 (2025-07-08T11:54:37.168759Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:37.193286Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:37.193962Z node 2 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In 
ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:37.193978Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:37.194235Z node 2 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:37.194348Z node 2 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [1:_:0:0:0] FirstNonceToKeep# 1762515 CutLogId# [2:7524678236975305122:2050] ownerRound# 3 PDiskId# 1 2025-07-08T11:54:37.272153Z node 3 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:37.281320Z node 3 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 17800145944727237450 MagicNextLogChunkReference: 2989408494123704581 MagicLogChunk: 8292320416675082218 MagicDataChunk: 5331067616854754907 MagicSysLogChunk: 12968333243067786229 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975677239232 (2025-07-08T11:54:37.239232Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:37.282665Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:37.283484Z node 3 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:37.283504Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:37.283905Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:37.284053Z node 3 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [2:_:0:0:0] FirstNonceToKeep# 1210060 CutLogId# [3:7524678237123488532:2050] ownerRound# 4 PDiskId# 1 2025-07-08T11:54:37.347429Z node 4 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:37.361158Z node 4 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} 
Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 4662085350724894150 MagicNextLogChunkReference: 217781191778403235 MagicLogChunk: 10646159074372311878 MagicDataChunk: 9304019043720842608 MagicSysLogChunk: 17596870231089103337 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975677307695 (2025-07-08T11:54:37.307695Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:37.366075Z node 4 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:37.373224Z node 4 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:37.373249Z node 4 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:37.373587Z node 4 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:37.377182Z node 4 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [3:_:0:0:0] FirstNonceToKeep# 1753615 CutLogId# [4:7524678237938127540:2050] ownerRound# 5 PDiskId# 1 2025-07-08T11:54:37.441478Z node 5 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:37.441659Z node 5 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 4773137853613263848 MagicNextLogChunkReference: 1456921172990313744 MagicLogChunk: 6784167876676022867 MagicDataChunk: 13701619656201265209 MagicSysLogChunk: 1225964415432464677 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975677423666 (2025-07-08T11:54:37.423666Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:37.442852Z node 5 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:37.443480Z node 5 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags 
LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:37.443498Z node 5 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:37.443741Z node 5 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:37.443826Z node 5 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [4:_:0:0:0] FirstNonceToKeep# 1684837 CutLogId# [5:7524678238624805685:2050] ownerRound# 6 PDiskId# 1 2025-07- ... fsetInChunk# 0} PDiskId# 1 2025-07-08T11:55:17.108913Z node 835 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:55:17.109320Z node 835 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:55:17.109410Z node 835 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [342:_:0:0:0] FirstNonceToKeep# 1928557 CutLogId# [835:7524678408057884229:2050] ownerRound# 836 PDiskId# 1 2025-07-08T11:55:17.109661Z node 835 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2025-07-08T11:55:17.121327Z node 835 :BS_PDISK_SHRED CRIT: ProcessShredPDisk with IS_SHRED_ENABLED# false at PDisk# 1 ShredGeneration# 0 request# TShredPDisk { Owner# 0 OwnerRound# 0 ShredGeneration# 1} 2025-07-08T11:55:17.121355Z node 835 :BS_PDISK_SHRED DEBUG: PDisk# 1 sends compact request to VDisk# [342:4294967295:0:0:0] ownerId# 3 request# {EvPreShredCompactVDisk ShredGeneration# 1} 2025-07-08T11:55:17.121366Z node 835 :BS_PDISK_SHRED DEBUG: PDisk# 1 ShredGeneration# 1 is waiting for ownerId# 3 before finishing pre-shred compact VDiskId# [342:4294967295:0:0:0] VDiskStatus# logged ShredState# "Compact requested" /home/runner/actions_runner/_work/ydb/ydb/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.h:372 2025-07-08T11:55:17.121518Z node 835 :BS_PDISK_SHRED DEBUG: ProcessPreShredCompactVDiskResult at PDisk# 1 ShredGeneration# 1 request# TPreShredCompactVDiskResult { Owner# 3 OwnerRound# 836 Status# OK ShredGeneration# 1 ErrorReason# } 2025-07-08T11:55:17.121533Z node 835 :BS_PDISK_SHRED DEBUG: PDisk# 1 has finished all pre-shred compact VDisk requests ShredGeneration# 1 finishedCount# 1 2025-07-08T11:55:17.219478Z node 835 :BS_PDISK_SHRED DEBUG: PDisk# 1 Is now waiting for VDisks to cut their log, requestsSent# 1 ShredGeneration# 1 2025-07-08T11:55:17.219900Z node 835 :BS_PDISK_SHRED DEBUG: PDisk# 1 found unshredded free 
chunk# 1 ShredGeneration# 1 2025-07-08T11:55:17.320665Z node 835 :BS_PDISK_SHRED DEBUG: PDisk# 1 is done shredding chunk ChunkBeingShredded# 1 2025-07-08T11:55:17.320737Z node 835 :BS_PDISK_SHRED DEBUG: PDisk# 1 has finished all shred requests ShredGeneration# 1 finishedCount# 1 2025-07-08T11:55:17.320744Z node 835 :BS_PDISK_SHRED NOTICE: Shred request is finished at PDisk# 1 ShredGeneration# 1 2025-07-08T11:55:17.320844Z node 835 :BS_PDISK_SHRED CRIT: ProcessShredPDisk with IS_SHRED_ENABLED# false at PDisk# 1 ShredGeneration# 1 request# TShredPDisk { Owner# 0 OwnerRound# 0 ShredGeneration# 1} 2025-07-08T11:55:17.320858Z node 835 :BS_PDISK_SHRED NOTICE: Registered one more shred requester at PDisk# 1 ShredGeneration# 1 request# TShredPDisk { Owner# 0 OwnerRound# 0 ShredGeneration# 1} 2025-07-08T11:55:17.361970Z node 836 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:55:17.362187Z node 836 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 13716342489410406906 MagicNextLogChunkReference: 14088283782595665535 MagicLogChunk: 6590143338294015629 MagicDataChunk: 3820938161778298459 MagicSysLogChunk: 765647995402589512 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975717342351 (2025-07-08T11:55:17.342351Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:55:17.363504Z node 836 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:55:17.364015Z node 836 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:55:17.364041Z node 836 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:55:17.364388Z node 836 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:55:17.364755Z node 836 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [343:_:0:0:0] FirstNonceToKeep# 1717631 CutLogId# [836:7524678408341955845:2050] ownerRound# 837 PDiskId# 1 2025-07-08T11:55:17.365531Z node 836 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2025-07-08T11:55:17.365892Z node 836 
:BS_PDISK_SHRED CRIT: ProcessShredPDisk with IS_SHRED_ENABLED# false at PDisk# 1 ShredGeneration# 0 request# TShredPDisk { Owner# 0 OwnerRound# 0 ShredGeneration# 1} 2025-07-08T11:55:17.365909Z node 836 :BS_PDISK_SHRED DEBUG: PDisk# 1 sends compact request to VDisk# [343:4294967295:0:0:0] ownerId# 3 request# {EvPreShredCompactVDisk ShredGeneration# 1} 2025-07-08T11:55:17.365953Z node 836 :BS_PDISK_SHRED DEBUG: PDisk# 1 ShredGeneration# 1 is waiting for ownerId# 3 before finishing pre-shred compact VDiskId# [343:4294967295:0:0:0] VDiskStatus# logged ShredState# "Compact requested" /home/runner/actions_runner/_work/ydb/ydb/ydb/core/blobstorage/pdisk/blobstorage_pdisk_ut_env.h:372 2025-07-08T11:55:17.365999Z node 836 :BS_PDISK_SHRED DEBUG: ProcessPreShredCompactVDiskResult at PDisk# 1 ShredGeneration# 1 request# TPreShredCompactVDiskResult { Owner# 3 OwnerRound# 837 Status# OK ShredGeneration# 1 ErrorReason# } 2025-07-08T11:55:17.366009Z node 836 :BS_PDISK_SHRED DEBUG: PDisk# 1 has finished all pre-shred compact VDisk requests ShredGeneration# 1 finishedCount# 1 2025-07-08T11:55:17.436158Z node 836 :BS_PDISK_SHRED DEBUG: PDisk# 1 Is now waiting for VDisks to cut their log, requestsSent# 1 ShredGeneration# 1 2025-07-08T11:55:17.436907Z node 836 :BS_PDISK_SHRED DEBUG: PDisk# 1 found unshredded free chunk# 1 ShredGeneration# 1 2025-07-08T11:55:17.533210Z node 836 :BS_PDISK_SHRED DEBUG: PDisk# 1 is done shredding chunk ChunkBeingShredded# 1 2025-07-08T11:55:17.533288Z node 836 :BS_PDISK_SHRED DEBUG: PDisk# 1 has finished all shred requests ShredGeneration# 1 finishedCount# 1 2025-07-08T11:55:17.533294Z node 836 :BS_PDISK_SHRED NOTICE: Shred request is finished at PDisk# 1 ShredGeneration# 1 2025-07-08T11:55:17.533531Z node 836 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:299} Shutdown OwnerInfo# {{OwnerId: 3 VDiskId: [343:_:0:0:0] ChunkWrites: 0 ChunkReads: 0 LogWrites: 0 LogReader: 0 CurrentFirstLsnToKeep: 3 FirstNonceToKeep: 1717631 StartingPoints: {{TLogRecord Signature# First Data.Size()# 1 Lsn# 3}} Owned chunkIds: {}} PDisk system/log ChunkIds: {0, 2} Free ChunkIds: {1, 3..982} PDiskId# 1 2025-07-08T11:55:17.535981Z node 836 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:55:17.536172Z node 836 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 13716342489410406906 MagicNextLogChunkReference: 14088283782595665535 MagicLogChunk: 6590143338294015629 MagicDataChunk: 3820938161778298459 MagicSysLogChunk: 765647995402589512 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975717342351 (2025-07-08T11:55:17.342351Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:55:17.537372Z node 836 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1582243 NonceLog# 1751305 NonceData# 1416559} LogHeadChunkIdx# 2 LogHeadChunkPreviousNonce# 1750907 Owner[3]# [343:4294967295:0:0:0]} PDiskId# 1 2025-07-08T11:55:17.538377Z node 836 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 2 SectorIdx# 397 OffsetInSector# 0 
In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 2 OffsetInChunk# 1626112} PDiskId# 1 2025-07-08T11:55:17.538403Z node 836 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 2 OffsetInChunk# 1626112} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:55:17.538643Z node 836 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:55:17.538934Z node 836 :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1817} Registered known VDisk VDisk# [343:4294967295:0:0:0] OwnerId# 3 OwnerRound# 838 PDiskId# 1 2025-07-08T11:55:17.538957Z node 836 :BS_PDISK_SHRED DEBUG: PDisk# 1 has finished all shred requests ShredGeneration# 1 finishedCount# 1 2025-07-08T11:55:17.538963Z node 836 :BS_PDISK_SHRED NOTICE: Shred request is finished at PDisk# 1 ShredGeneration# 1 2025-07-08T11:55:17.539742Z node 836 :BS_PDISK WARN: {LR004@blobstorage_pdisk_logreader.cpp:838} PDiskId# 1 LogReader IsInitial# 0 Owner# 3 VDiskId# [343:_:0:0:0] ChunkIdx# 2 SectorIdx# 398 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags outside the LogEndSector LogEndChunkIdx# 2 LogEndSectorIdx# 398 PDiskId# 1 2025-07-08T11:55:17.539760Z node 836 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 1} PDiskId# 1 2025-07-08T11:55:17.540355Z node 836 :BS_PDISK_SHRED CRIT: ProcessShredPDisk with IS_SHRED_ENABLED# false at PDisk# 1 ShredGeneration# 1 request# TShredPDisk { Owner# 0 OwnerRound# 0 ShredGeneration# 1} 2025-07-08T11:55:17.540364Z node 836 :BS_PDISK_SHRED NOTICE: Registered one more shred requester at PDisk# 1 ShredGeneration# 1 request# TShredPDisk { Owner# 0 OwnerRound# 0 ShredGeneration# 1} |67.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TLsnMngrTests::AllocLsnForLocalUse [GOOD] |67.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/actors/cppcoro/ut/ydb-library-actors-cppcoro-ut >> simple_json_diff::import_test [GOOD] >> AuthDatabaseAdmin::FailOnOwnerAndNoToken [GOOD] >> AuthTokenAllowed::PassOnListMatchUserSidWithGroup [GOOD] >> AuthDatabaseAdmin::FailOnEmptyOwnerAndTokenWithEmptyUserSidAndGroups [GOOD] >> AuthDatabaseAdmin::FailOnOwnerAndEmptyToken [GOOD] >> AuthTokenAllowed::PassOnListMatchUserSid [GOOD] |67.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp |67.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_auth/unittest >> AuthTokenAllowed::PassOnEmptyListAndInvalidTokenSerialized [GOOD] |67.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/actors/http/ut/ydb-library-actors-http-ut |67.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/tools/simple_json_diff/import_test >> simple_json_diff::import_test [GOOD] >> TBlobStorageCryptoRope::TestEqualInplaceStreamCypher [GOOD] >> TBlobStorageCryptoRope::TestEqualMixedStreamCypher >> ydb-tests-stress-simple_queue-tests::import_test [GOOD] >> 
AuthTokenAllowed::FailOnListAndTokenWithEmptyUserSidAndGroups [GOOD] >> AuthTokenAllowed::FailOnListAndNoToken [GOOD] >> AuthTokenAllowed::FailOnListAndTokenWithEmptyUserSid [GOOD] >> AuthTokenAllowed::FailOnListAndEmptyToken [GOOD] >> AuthDatabaseAdmin::PassOnOwnerMatchUserSidWithGroup [GOOD] >> AuthDatabaseAdmin::PassOnOwnerMatchUserSid [GOOD] >> AuthTokenAllowed::PassOnEmptyListAndTokenWithEmptyUserSid [GOOD] >> AuthTokenAllowed::PassOnEmptyListAndToken [GOOD] >> AuthDatabaseAdmin::FailOnEmptyOwnerAndNoToken [GOOD] >> AuthDatabaseAdmin::FailOnEmptyOwnerAndEmptyToken [GOOD] >> AuthDatabaseAdmin::FailOnEmptyOwnerAndTokenWithEmptyUserSid [GOOD] |67.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_auth/unittest >> AuthTokenAllowed::PassOnListMatchUserSid [GOOD] |67.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_auth/unittest >> AuthDatabaseAdmin::FailOnOwnerAndEmptyToken [GOOD] >> HugeCluster::AllToOne [GOOD] >> AuthTokenAllowed::PassOnEmptyListAndEmptyToken [GOOD] >> AuthTokenAllowed::FailOnListMatchGroupSid [GOOD] |67.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/simple_queue/tests/import_test >> ydb-tests-stress-simple_queue-tests::import_test [GOOD] |67.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_auth/unittest >> AuthTokenAllowed::FailOnListAndTokenWithEmptyUserSid [GOOD] >> AuthTokenAllowed::PassOnListMatchGroupSid [GOOD] >> AuthTokenAllowed::PassOnEmptyListAndTokenWithEmptyUserSidAndGroups [GOOD] >> AuthDatabaseAdmin::PassOnOwnerMatchGroupSid [GOOD] >> AuthDatabaseAdmin::FailOnOwnerAndTokenWithEmptyUserSid [GOOD] >> AuthDatabaseAdmin::FailOnOwnerAndTokenWithEmptyUserSidAndGroups [GOOD] |67.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_auth/unittest >> AuthTokenAllowed::PassOnEmptyListAndToken [GOOD] |67.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/config/init/ut/ydb-core-config-init-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_mirror3of4/unittest >> Mirror3of4::ReplicationHuge [GOOD] Test command err: 2025-07-08T11:55:05.344261Z 1 00h00m00.000000s :BS_SKELETON INFO: VDISK[0:_:0:0:0]: SKELETON START Marker# BSVS37 2025-07-08T11:55:05.344320Z 2 00h00m00.000000s :BS_SKELETON INFO: VDISK[0:_:0:1:0]: SKELETON START Marker# BSVS37 2025-07-08T11:55:05.344350Z 3 00h00m00.000000s :BS_SKELETON INFO: VDISK[0:_:0:2:0]: SKELETON START Marker# BSVS37 2025-07-08T11:55:05.344373Z 4 00h00m00.000000s :BS_SKELETON INFO: VDISK[0:_:0:3:0]: SKELETON START Marker# BSVS37 2025-07-08T11:55:05.344398Z 5 00h00m00.000000s :BS_SKELETON INFO: VDISK[0:_:0:4:0]: SKELETON START Marker# BSVS37 2025-07-08T11:55:05.344423Z 6 00h00m00.000000s :BS_SKELETON INFO: VDISK[0:_:0:5:0]: SKELETON START Marker# BSVS37 2025-07-08T11:55:05.344475Z 7 00h00m00.000000s :BS_SKELETON INFO: VDISK[0:_:0:6:0]: SKELETON START Marker# BSVS37 2025-07-08T11:55:05.344513Z 8 00h00m00.000000s :BS_SKELETON INFO: VDISK[0:_:0:7:0]: SKELETON START Marker# BSVS37 2025-07-08T11:55:05.344589Z 1 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: VDISK[0:_:0:0:0]: LocalRecovery START 2025-07-08T11:55:05.344603Z 1 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: VDISK[0:_:0:0:0]: Sending TEvYardInit: pdiskGuid# 13092001980481176065 skeletonid# [1:139:13] selfid# [1:155:22] delay 0.000000 sec 2025-07-08T11:55:05.344610Z 2 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: VDISK[0:_:0:1:0]: LocalRecovery START 2025-07-08T11:55:05.344616Z 2 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: VDISK[0:_:0:1:0]: Sending TEvYardInit: pdiskGuid# 16054802353444150401 skeletonid# 
[2:140:11] selfid# [2:156:12] delay 0.000000 sec 2025-07-08T11:55:05.344621Z 3 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: VDISK[0:_:0:2:0]: LocalRecovery START 2025-07-08T11:55:05.344627Z 3 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: VDISK[0:_:0:2:0]: Sending TEvYardInit: pdiskGuid# 3692145322175964167 skeletonid# [3:141:11] selfid# [3:157:12] delay 0.000000 sec 2025-07-08T11:55:05.344631Z 4 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: VDISK[0:_:0:3:0]: LocalRecovery START 2025-07-08T11:55:05.344636Z 4 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: VDISK[0:_:0:3:0]: Sending TEvYardInit: pdiskGuid# 12989306286023112868 skeletonid# [4:142:11] selfid# [4:158:12] delay 0.000000 sec 2025-07-08T11:55:05.344641Z 5 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: VDISK[0:_:0:4:0]: LocalRecovery START 2025-07-08T11:55:05.344646Z 5 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: VDISK[0:_:0:4:0]: Sending TEvYardInit: pdiskGuid# 5210808106937779323 skeletonid# [5:143:11] selfid# [5:159:12] delay 0.000000 sec 2025-07-08T11:55:05.344650Z 6 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: VDISK[0:_:0:5:0]: LocalRecovery START 2025-07-08T11:55:05.344665Z 6 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: VDISK[0:_:0:5:0]: Sending TEvYardInit: pdiskGuid# 11323309884220736019 skeletonid# [6:144:11] selfid# [6:160:12] delay 0.000000 sec 2025-07-08T11:55:05.344671Z 7 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: VDISK[0:_:0:6:0]: LocalRecovery START 2025-07-08T11:55:05.344676Z 7 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: VDISK[0:_:0:6:0]: Sending TEvYardInit: pdiskGuid# 18369470131514548023 skeletonid# [7:145:11] selfid# [7:161:12] delay 0.000000 sec 2025-07-08T11:55:05.344681Z 8 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: VDISK[0:_:0:7:0]: LocalRecovery START 2025-07-08T11:55:05.344686Z 8 00h00m00.000000s :BS_LOCALRECOVERY DEBUG: VDISK[0:_:0:7:0]: Sending TEvYardInit: pdiskGuid# 7071870700020277250 skeletonid# [8:146:11] selfid# [8:162:12] delay 0.000000 sec 2025-07-08T11:55:05.344820Z 1 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:451} PDiskMock[1:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:0:0] PDiskGuid# 13092001980481176065 CutLogID# [1:139:13] WhiteboardProxyId# [1:122:10]} 2025-07-08T11:55:05.345029Z 1 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:486} PDiskMock[1:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943}} OwnedChunks# {}} Created# true 2025-07-08T11:55:05.345048Z 2 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:451} PDiskMock[2:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:1:0] PDiskGuid# 16054802353444150401 CutLogID# [2:140:11] WhiteboardProxyId# [2:124:10]} 2025-07-08T11:55:05.345056Z 2 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:486} PDiskMock[2:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943}} 
OwnedChunks# {}} Created# true 2025-07-08T11:55:05.345063Z 3 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:451} PDiskMock[3:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:2:0] PDiskGuid# 3692145322175964167 CutLogID# [3:141:11] WhiteboardProxyId# [3:126:10]} 2025-07-08T11:55:05.345070Z 3 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:486} PDiskMock[3:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943}} OwnedChunks# {}} Created# true 2025-07-08T11:55:05.345077Z 4 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:451} PDiskMock[4:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:3:0] PDiskGuid# 12989306286023112868 CutLogID# [4:142:11] WhiteboardProxyId# [4:128:10]} 2025-07-08T11:55:05.345083Z 4 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:486} PDiskMock[4:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943}} OwnedChunks# {}} Created# true 2025-07-08T11:55:05.345089Z 5 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:451} PDiskMock[5:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:4:0] PDiskGuid# 5210808106937779323 CutLogID# [5:143:11] WhiteboardProxyId# [5:130:10]} 2025-07-08T11:55:05.345096Z 5 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:486} PDiskMock[5:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943}} OwnedChunks# {}} Created# true 2025-07-08T11:55:05.345102Z 6 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:451} PDiskMock[6:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:5:0] PDiskGuid# 11323309884220736019 CutLogID# [6:144:11] WhiteboardProxyId# [6:132:10]} 2025-07-08T11:55:05.345108Z 6 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:486} PDiskMock[6:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943}} OwnedChunks# {}} Created# true 2025-07-08T11:55:05.345116Z 7 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:451} PDiskMock[7:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:6:0] PDiskGuid# 18369470131514548023 CutLogID# [7:145:11] 
WhiteboardProxyId# [7:134:10]} 2025-07-08T11:55:05.345122Z 7 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:486} PDiskMock[7:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943}} OwnedChunks# {}} Created# true 2025-07-08T11:55:05.345129Z 8 00h00m00.000000s :BS_PDISK NOTICE: {PDM01@pdisk_mock.cpp:451} PDiskMock[8:1] received TEvYardInit Msg# {EvYardInit ownerRound# 2 VDisk# [0:4294967295:0:7:0] PDiskGuid# 7071870700020277250 CutLogID# [8:146:11] WhiteboardProxyId# [8:136:10]} 2025-07-08T11:55:05.345135Z 8 00h00m00.000000s :BS_PDISK INFO: {PDM02@pdisk_mock.cpp:486} PDiskMock[8:1] sending TEvYardInitResult Msg# {EvYardInitResult Status# OK ErrorReason# "" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 1 ownerRound# 2 ChunkSize# 134217728 AppendBlockSize# 4096 RecommendedReadSize# 45056 SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 BulkWriteBlockSize# 65536 PrefetchSizeBytes# 209715 GlueRequestDistanceBytes# 41943}} OwnedChunks# {}} Created# true 2025-07-08T11:55:05.345531Z 1 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: VDISK[0:_:0:0:0]: MAX LSNS: LogoBlobs# [ExplicitlySet# true Derived# false Lsn# NotSet] Blocks# [ExplicitlySet# true Derived# false Lsn# NotSet] Barriers# [ExplicitlySet# true Derived# false Lsn# NotSet] SyncLog# 0 2025-07-08T11:55:05.345792Z 2 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: VDISK[0:_:0:1:0]: MAX LSNS: LogoBlobs# [ExplicitlySet# true Derived# false Lsn# NotSet] Blocks# [ExplicitlySet# true Derived# false Lsn# NotSet] Barriers# [ExplicitlySet# true Derived# false Lsn# NotSet] SyncLog# 0 2025-07-08T11:55:05.345973Z 3 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: VDISK[0:_:0:2:0]: MAX LSNS: LogoBlobs# [ExplicitlySet# true Derived# false Lsn# NotSet] Blocks# [ExplicitlySet# true Derived# false Lsn# NotSet] Barriers# [ExplicitlySet# true Derived# false Lsn# NotSet] SyncLog# 0 2025-07-08T11:55:05.346183Z 4 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: VDISK[0:_:0:3:0]: MAX LSNS: LogoBlobs# [ExplicitlySet# true Derived# false Lsn# NotSet] Blocks# [ExplicitlySet# true Derived# false Lsn# NotSet] Barriers# [ExplicitlySet# true Derived# false Lsn# NotSet] SyncLog# 0 2025-07-08T11:55:05.346389Z 5 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: VDISK[0:_:0:4:0]: MAX LSNS: LogoBlobs# [ExplicitlySet# true Derived# false Lsn# NotSet] Blocks# [ExplicitlySet# true Derived# false Lsn# NotSet] Barriers# [ExplicitlySet# true Derived# false Lsn# NotSet] SyncLog# 0 2025-07-08T11:55:05.346585Z 6 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: VDISK[0:_:0:5:0]: MAX LSNS: LogoBlobs# [ExplicitlySet# true Derived# false Lsn# NotSet] Blocks# [ExplicitlySet# true Derived# false Lsn# NotSet] Barriers# [ExplicitlySet# true Derived# false Lsn# NotSet] SyncLog# 0 2025-07-08T11:55:05.346810Z 7 00h00m00.000000s :BS_LOCALRECOVERY NOTICE: VDISK[0:_:0:6:0]: MAX LSNS: LogoBlobs# [ExplicitlySet# true Derived# false Lsn# NotSet] Blocks# [ExplicitlySet# true Derived# false Lsn# NotSet] Barriers# [ExplicitlySet# true Derived# false Lsn# NotSet] SyncLog# 0 2025-0 ... 
g ownerId# 1 ownerRound# 2 Signature# 138 DataSize# 602 Lsn# 25 LsnSegmentStart# 25 Cookie# 0{CommitRecord FirstLsnToKeep# 0 IsStartingPoint# 1 DeleteToDecommitted# 0 CommitChunks# [] DeleteChunks# [] DirtyChunks# []}} VDiskId# [0:4294967295:0:6:0] 2025-07-08T11:55:18.554251Z 7 00h00m00.000000s :BS_PDISK DEBUG: {PDM12@pdisk_mock.cpp:645} PDiskMock[7:1] sending TEvLogResult Msg# {EvLogResult Status# OK ErrorReason# "" StatusFlags# None LogChunkCount# 0{Lsn# 25 Cookie# 0}} Recipient# [7:345:29] 2025-07-08T11:55:18.554275Z 8 00h00m00.000000s :BS_PDISK DEBUG: {PDM11@pdisk_mock.cpp:583} PDiskMock[8:1] received TEvLog Msg# {EvLog ownerId# 1 ownerRound# 2 Signature# 138 DataSize# 602 Lsn# 25 LsnSegmentStart# 25 Cookie# 0{CommitRecord FirstLsnToKeep# 0 IsStartingPoint# 1 DeleteToDecommitted# 0 CommitChunks# [] DeleteChunks# [] DirtyChunks# []}} VDiskId# [0:4294967295:0:7:0] 2025-07-08T11:55:18.554281Z 8 00h00m00.000000s :BS_PDISK DEBUG: {PDM12@pdisk_mock.cpp:645} PDiskMock[8:1] sending TEvLogResult Msg# {EvLogResult Status# OK ErrorReason# "" StatusFlags# None LogChunkCount# 0{Lsn# 25 Cookie# 0}} Recipient# [8:355:29] 2025-07-08T11:55:18.554717Z 7 00h00m00.000000s :BS_PDISK DEBUG: {PDM11@pdisk_mock.cpp:583} PDiskMock[7:1] received TEvLog Msg# {EvLog ownerId# 1 ownerRound# 2 Signature# 138 DataSize# 602 Lsn# 26 LsnSegmentStart# 26 Cookie# 0{CommitRecord FirstLsnToKeep# 0 IsStartingPoint# 1 DeleteToDecommitted# 0 CommitChunks# [] DeleteChunks# [] DirtyChunks# []}} VDiskId# [0:4294967295:0:6:0] 2025-07-08T11:55:18.554732Z 7 00h00m00.000000s :BS_PDISK DEBUG: {PDM12@pdisk_mock.cpp:645} PDiskMock[7:1] sending TEvLogResult Msg# {EvLogResult Status# OK ErrorReason# "" StatusFlags# None LogChunkCount# 0{Lsn# 26 Cookie# 0}} Recipient# [7:345:29] 2025-07-08T11:55:18.554746Z 8 00h00m00.000000s :BS_PDISK DEBUG: {PDM11@pdisk_mock.cpp:583} PDiskMock[8:1] received TEvLog Msg# {EvLog ownerId# 1 ownerRound# 2 Signature# 138 DataSize# 602 Lsn# 26 LsnSegmentStart# 26 Cookie# 0{CommitRecord FirstLsnToKeep# 0 IsStartingPoint# 1 DeleteToDecommitted# 0 CommitChunks# [] DeleteChunks# [] DirtyChunks# []}} VDiskId# [0:4294967295:0:7:0] 2025-07-08T11:55:18.554752Z 8 00h00m00.000000s :BS_PDISK DEBUG: {PDM12@pdisk_mock.cpp:645} PDiskMock[8:1] sending TEvLogResult Msg# {EvLogResult Status# OK ErrorReason# "" StatusFlags# None LogChunkCount# 0{Lsn# 26 Cookie# 0}} Recipient# [8:355:29] 2025-07-08T11:55:18.554802Z 2 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:1:0]: TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65537} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2025-07-08T11:55:18.554848Z 7 00h00m00.000000s :BS_PDISK DEBUG: {PDM11@pdisk_mock.cpp:583} PDiskMock[7:1] received TEvLog Msg# {EvLog ownerId# 1 ownerRound# 2 Signature# 138 DataSize# 602 Lsn# 27 LsnSegmentStart# 27 Cookie# 0{CommitRecord FirstLsnToKeep# 0 IsStartingPoint# 1 DeleteToDecommitted# 0 CommitChunks# [] DeleteChunks# [] DirtyChunks# []}} VDiskId# [0:4294967295:0:6:0] 2025-07-08T11:55:18.554855Z 7 00h00m00.000000s :BS_PDISK DEBUG: {PDM12@pdisk_mock.cpp:645} PDiskMock[7:1] sending TEvLogResult Msg# {EvLogResult Status# OK ErrorReason# "" StatusFlags# None LogChunkCount# 0{Lsn# 27 Cookie# 0}} Recipient# [7:345:29] 
2025-07-08T11:55:18.554863Z 2 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:1:0]: GLUEREAD(0x55b33facab30): {EvChunkRead chunkIdx# 1 Offset# 5 Size# 1048576 ownerId# 1 ownerRound# 2 PriorityClass# 2 Cookie# 94228346608144} 2025-07-08T11:55:18.554871Z 8 00h00m00.000000s :BS_PDISK DEBUG: {PDM11@pdisk_mock.cpp:583} PDiskMock[8:1] received TEvLog Msg# {EvLog ownerId# 1 ownerRound# 2 Signature# 138 DataSize# 602 Lsn# 27 LsnSegmentStart# 27 Cookie# 0{CommitRecord FirstLsnToKeep# 0 IsStartingPoint# 1 DeleteToDecommitted# 0 CommitChunks# [] DeleteChunks# [] DirtyChunks# []}} VDiskId# [0:4294967295:0:7:0] 2025-07-08T11:55:18.554876Z 8 00h00m00.000000s :BS_PDISK DEBUG: {PDM12@pdisk_mock.cpp:645} PDiskMock[8:1] sending TEvLogResult Msg# {EvLogResult Status# OK ErrorReason# "" StatusFlags# None LogChunkCount# 0{Lsn# 27 Cookie# 0}} Recipient# [8:355:29] 2025-07-08T11:55:18.554886Z 2 00h00m00.000000s :BS_PDISK DEBUG: {PDM13@pdisk_mock.cpp:735} PDiskMock[2:1] received TEvChunkRead Msg# {EvChunkRead chunkIdx# 1 Offset# 5 Size# 1048576 ownerId# 1 ownerRound# 2 PriorityClass# 2 Cookie# 94228346608144} VDiskId# [0:4294967295:0:1:0] 2025-07-08T11:55:18.554997Z 2 00h00m00.000000s :BS_PDISK DEBUG: {PDM14@pdisk_mock.cpp:775} PDiskMock[2:1] sending TEvChunkReadResult Msg# {EvChunkReadres Status# OK ErrorReason# "" chunkIdx# 1 Offset# 5 DataSize# 1048576 Cookie# 94228346608144 StatusFlags# None} 2025-07-08T11:55:18.555019Z 2 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:1:0]: GLUEREAD FINISHED(0x55b33facab30): actualReadN# 1 origReadN# 1 2025-07-08T11:55:18.555057Z 2 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:1:0]: TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:2] OK Size# 1048576 FullDataSize# 1048576 PayloadId# 0 Data# 1048576b Ingress# 1369701526376808448} BlockedGeneration# 0} 2025-07-08T11:55:18.555568Z 3 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:2:0]: TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65537} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2025-07-08T11:55:18.555681Z 3 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:2:0]: GLUEREAD(0x55b33facab30): {EvChunkRead chunkIdx# 1 Offset# 5 Size# 1048576 ownerId# 1 ownerRound# 2 PriorityClass# 2 Cookie# 94228346608144} 2025-07-08T11:55:18.555727Z 3 00h00m00.000000s :BS_PDISK DEBUG: {PDM13@pdisk_mock.cpp:735} PDiskMock[3:1] received TEvChunkRead Msg# {EvChunkRead chunkIdx# 1 Offset# 5 Size# 1048576 ownerId# 1 ownerRound# 2 PriorityClass# 2 Cookie# 94228346608144} VDiskId# [0:4294967295:0:2:0] 2025-07-08T11:55:18.555823Z 3 00h00m00.000000s :BS_PDISK DEBUG: {PDM14@pdisk_mock.cpp:775} PDiskMock[3:1] sending TEvChunkReadResult Msg# {EvChunkReadres Status# OK ErrorReason# "" chunkIdx# 1 Offset# 5 DataSize# 1048576 Cookie# 94228346608144 StatusFlags# None} 2025-07-08T11:55:18.555842Z 3 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:2:0]: GLUEREAD FINISHED(0x55b33facab30): actualReadN# 1 origReadN# 1 2025-07-08T11:55:18.555856Z 3 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:2:0]: TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:1] OK Size# 1048576 FullDataSize# 1048576 PayloadId# 0 Data# 1048576b Ingress# 2522623030983655424} BlockedGeneration# 0} 
2025-07-08T11:55:18.556189Z 4 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:3:0]: TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65537} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2025-07-08T11:55:18.556228Z 4 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:3:0]: TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:3] OK Size# 0 FullDataSize# 1048576 BufferData# Ingress# 793240774073384960} BlockedGeneration# 0} 2025-07-08T11:55:18.556297Z 5 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:4:0]: TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65537} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2025-07-08T11:55:18.556324Z 5 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:4:0]: TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:3] OK Size# 0 FullDataSize# 1048576 BufferData# Ingress# 793240774073384960} BlockedGeneration# 0} 2025-07-08T11:55:18.556377Z 6 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:5:0]: TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65537} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2025-07-08T11:55:18.556406Z 6 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:5:0]: GLUEREAD(0x55b33fad4d70): {EvChunkRead chunkIdx# 1 Offset# 5 Size# 1048576 ownerId# 1 ownerRound# 2 PriorityClass# 2 Cookie# 94228346608144} 2025-07-08T11:55:18.556416Z 6 00h00m00.000000s :BS_PDISK DEBUG: {PDM13@pdisk_mock.cpp:735} PDiskMock[6:1] received TEvChunkRead Msg# {EvChunkRead chunkIdx# 1 Offset# 5 Size# 1048576 ownerId# 1 ownerRound# 2 PriorityClass# 2 Cookie# 94228346608144} VDiskId# [0:4294967295:0:5:0] 2025-07-08T11:55:18.556497Z 6 00h00m00.000000s :BS_PDISK DEBUG: {PDM14@pdisk_mock.cpp:775} PDiskMock[6:1] sending TEvChunkReadResult Msg# {EvChunkReadres Status# OK ErrorReason# "" chunkIdx# 1 Offset# 5 DataSize# 1048576 Cookie# 94228346608144 StatusFlags# None} 2025-07-08T11:55:18.556507Z 6 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:5:0]: GLUEREAD FINISHED(0x55b33fad4d70): actualReadN# 1 origReadN# 1 2025-07-08T11:55:18.556523Z 6 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:5:0]: TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:2] OK Size# 1048576 FullDataSize# 1048576 PayloadId# 0 Data# 1048576b Ingress# 1946162278680231936} {[1:1:1:0:0:1048576:3] OK Size# 0 FullDataSize# 1048576 BufferData# Ingress# 1946162278680231936} BlockedGeneration# 0} 2025-07-08T11:55:18.556855Z 7 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:6:0]: TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { SequenceId: 
1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65537} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2025-07-08T11:55:18.556892Z 7 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:6:0]: TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:0] NODATA Ingress# 216780021769961472} BlockedGeneration# 0} 2025-07-08T11:55:18.556981Z 8 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:7:0]: TEvVGet: {ExtrQuery# [1:1:1:0:0:1048576:0] sh# 0 sz# 0} {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 1680000 ExtQueueId# GetFastRead IntQueueId# IntGetFast CostSettings# { SeekTimeUs# 40 ReadSpeedBps# 1048576000 WriteSpeedBps# 1048576000 ReadBlockSize# 65536 WriteBlockSize# 65536 MinHugeBlobInBytes# 65537} SendMeCostSettings# 1} Notify# 0 Internals# 1 TabletId# 0 AcquireBlockedGeneration# 0 ForceBlockedGeneration# 0} Marker# BSVS14 2025-07-08T11:55:18.557007Z 8 00h00m00.000000s :BS_VDISK_GET DEBUG: VDISK[0:_:0:7:0]: TEvVGetResult: {EvVGetResult QueryResult Status# OK {[1:1:1:0:0:1048576:0] NODATA Ingress# 216780021769961472} BlockedGeneration# 0} |67.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_auth/unittest >> AuthDatabaseAdmin::PassOnOwnerMatchUserSid [GOOD] |67.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_auth/unittest >> AuthDatabaseAdmin::FailOnEmptyOwnerAndTokenWithEmptyUserSid [GOOD] |67.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_auth/unittest >> AuthTokenAllowed::FailOnListMatchGroupSid [GOOD] |67.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/actors/interconnect/ut_huge_cluster/unittest >> HugeCluster::AllToOne [GOOD] |67.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_auth/unittest >> AuthDatabaseAdmin::FailOnOwnerAndTokenWithEmptyUserSidAndGroups [GOOD] |67.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_auth/unittest >> AuthTokenAllowed::PassOnEmptyListAndTokenWithEmptyUserSidAndGroups [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse2Threads [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse10Threads |66.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |66.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/runner/libpy3benchmarks-runner-runner.global.a >> common/__init__.py::flake8 [GOOD] >> common/composite_assert.py::flake8 [GOOD] >> common/generators.py::flake8 [GOOD] >> common/local_db_scheme.py::flake8 [GOOD] >> common/path_types.py::flake8 [GOOD] >> common/protobuf_cms.py::flake8 [GOOD] >> common/protobuf_kv.py::flake8 [GOOD] >> common/types.py::flake8 [GOOD] >> common/workload_manager.py::flake8 [GOOD] >> harness/__init__.py::flake8 [GOOD] >> harness/kikimr_cluster.py::flake8 [GOOD] >> harness/kikimr_config.py::flake8 [GOOD] >> harness/kikimr_port_allocator.py::flake8 [GOOD] >> harness/param_constants.py::flake8 [GOOD] >> harness/util.py::flake8 [GOOD] >> kv/__init__.py::flake8 [GOOD] >> matchers/__init__.py::flake8 [GOOD] >> matchers/datashard_matchers.py::flake8 [GOOD] >> matchers/response_matchers.py::flake8 [GOOD] |65.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/ut/ydb-library-yaml_config-ut >> matchers/tablets.py::flake8 [GOOD] >> nemesis/nemesis_core.py::flake8 [GOOD] >> nemesis/nemesis_process_killers.py::flake8 [GOOD] >> nemesis/network/__init__.py::flake8 
[GOOD] >> nemesis/remote_execution.py::flake8 [GOOD] >> predicates/__init__.py::flake8 [GOOD] >> predicates/executor.py::flake8 [GOOD] >> predicates/tx.py::flake8 [GOOD] |65.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/flake8 >> predicates/tx.py::flake8 [GOOD] |65.6%| [TA] $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |65.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/tstool/libpy3tstool.global.a |65.6%| [TA] $(B)/ydb/library/yql/dq/actors/spilling/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> common/cms.py::flake8 [GOOD] >> common/delayed.py::flake8 [GOOD] >> common/helpers.py::flake8 [GOOD] >> common/msgbus_types.py::flake8 [GOOD] >> common/protobuf.py::flake8 [GOOD] >> common/protobuf_console.py::flake8 [GOOD] >> common/protobuf_ss.py::flake8 [GOOD] >> common/wait_for.py::flake8 [GOOD] >> common/yatest_common.py::flake8 [GOOD] >> harness/daemon.py::flake8 [GOOD] >> harness/kikimr_cluster_interface.py::flake8 [GOOD] >> harness/kikimr_node_interface.py::flake8 [GOOD] >> harness/kikimr_runner.py::flake8 [GOOD] >> TBlobStorageCryptoRope::TestEqualMixedStreamCypher [GOOD] >> harness/tls_tools.py::flake8 [GOOD] |65.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests >> TBlobStorageCryptoRope::TestMixedStreamCypher >> harness/ydbd_slice.py::flake8 [GOOD] >> kv/helpers.py::flake8 [GOOD] >> matchers/collection.py::flake8 [GOOD] >> matchers/response.py::flake8 [GOOD] >> matchers/scheme_ops.py::flake8 [GOOD] >> nemesis/__init__.py::flake8 [GOOD] >> nemesis/nemesis_network.py::flake8 [GOOD] >> nemesis/nemesis_time_terrorist.py::flake8 [GOOD] >> nemesis/network/client.py::flake8 [GOOD] >> nemesis/safety_warden.py::flake8 [GOOD] >> predicates/blobstorage.py::flake8 [GOOD] >> TBlobStorageCryptoRope::TestMixedStreamCypher [GOOD] >> TBlobStorageCryptoRope::TestOffsetStreamCypher >> predicates/hive.py::flake8 [GOOD] >> TBlobStorageCryptoRope::TestOffsetStreamCypher [GOOD] >> TBlobStorageCryptoRope::TestInplaceStreamCypher [GOOD] >> TBlobStorageCryptoRope::PerfTestStreamCypher >> TBlobStorageCryptoRope::PerfTestStreamCypher [GOOD] >> TBlobStorageCryptoRope::UnalignedTestStreamCypher [GOOD] >> TChaCha::KeystreamTest1 [GOOD] >> TChaCha::KeystreamTest2 [GOOD] >> TChaCha::KeystreamTest3 [GOOD] >> TChaCha::KeystreamTest4 [GOOD] >> TChaCha::KeystreamTest5 [GOOD] >> TChaCha::KeystreamTest6 [GOOD] >> TChaCha::KeystreamTest7 [GOOD] >> TChaCha::KeystreamTest8 [GOOD] >> TChaCha::MultiEncipherOneDecipher [GOOD] >> TChaCha::SecondBlock [GOOD] >> TChaCha512::KeystreamTest1 [GOOD] >> TChaCha512::KeystreamTest2 [GOOD] >> TChaCha512::KeystreamTest3 [GOOD] |65.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/flake8 >> predicates/hive.py::flake8 [GOOD] >> TChaCha512::KeystreamTest4 [GOOD] >> TChaCha512::KeystreamTest5 [GOOD] >> TChaCha512::KeystreamTest6 [GOOD] >> TChaCha512::KeystreamTest7 [GOOD] >> TChaCha512::KeystreamTest8 [GOOD] >> TChaCha512::MultiEncipherOneDecipher [GOOD] >> TChaCha512::SecondBlock [GOOD] >> TChaCha512::CompatibilityTest >> TChaCha512::CompatibilityTest [GOOD] >> TChaChaVec::KeystreamTest1 [GOOD] >> TChaChaVec::KeystreamTest2 [GOOD] >> TChaChaVec::KeystreamTest3 [GOOD] >> TChaChaVec::KeystreamTest4 [GOOD] >> TChaChaVec::KeystreamTest5 [GOOD] >> TChaChaVec::KeystreamTest6 [GOOD] >> TChaChaVec::KeystreamTest7 [GOOD] >> TChaChaVec::KeystreamTest8 [GOOD] >> TChaChaVec::MultiEncipherOneDecipher [GOOD] >> TChaChaVec::SecondBlock [GOOD] 
>> TChaChaVec::CompatibilityTest >> TChaChaVec::CompatibilityTest [GOOD] >> TPoly1305::TestVector1 [GOOD] >> TPoly1305::TestVector2 [GOOD] >> TPoly1305::TestVector3 [GOOD] >> TPoly1305::TestVector4 [GOOD] >> TPoly1305Vec::TestVector1 [GOOD] >> TPoly1305Vec::TestVector2 [GOOD] >> TPoly1305Vec::TestVector3 [GOOD] >> TPoly1305Vec::TestVector4 [GOOD] >> TTest_t1ha::TestZeroInputHashIsNotZero [GOOD] >> TTest_t1ha::PerfTest [GOOD] >> TTest_t1ha::T1haHashResultsStablilityTest [GOOD] |65.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |65.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/crypto/ut/unittest >> TTest_t1ha::T1haHashResultsStablilityTest [GOOD] |65.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/bin/libpy3ydb_configure.global.a |65.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/ydb-tests-functional-api >> ydb-tests-functional-query_cache::import_test [GOOD] |65.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp |64.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_svc/ydb-tests-functional-kqp-kqp_query_svc |64.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/query_cache/import_test >> ydb-tests-functional-query_cache::import_test [GOOD] |64.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/validator/ut/validator/ydb-library-yaml_config-validator-ut-validator |64.9%| [TA] $(B)/ydb/core/blobstorage/vdisk/huge/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TPDiskUtil::DriveEstimator [GOOD] |64.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/solomon/ydb-tests-fq-solomon >> TPDiskUtil::OffsetParsingCorrectness [GOOD] |64.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage >> TPDiskUtil::PayloadParsingTest [GOOD] >> TPDiskUtil::SectorRestorator [GOOD] >> TPDiskUtil::SectorPrint [GOOD] >> TPDiskUtil::SectorMap [GOOD] >> TPDiskUtil::FormatSectorMap [GOOD] >> TPDiskUtil::SectorMapStoreLoadFromFile [GOOD] >> TLsnMngrTests::AllocLsnForLocalUse10Threads [GOOD] >> TOutOfSpaceStateTests::TestLocal [GOOD] >> TOutOfSpaceStateTests::TestGlobal [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> TPDiskUtil::SectorMapStoreLoadFromFile [GOOD] Test command err: 2025-07-08T11:54:21.238368Z node 1 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:21.238612Z node 1 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 12786919213118801615 MagicNextLogChunkReference: 11555641458774044118 MagicLogChunk: 5543761861124458977 MagicDataChunk: 14425454681314202419 MagicSysLogChunk: 10472331556321334293 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975661220232 (2025-07-08T11:54:21.220232Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:21.240023Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 
2025-07-08T11:54:21.241365Z node 1 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:21.241520Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:21.242041Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:21.242776Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1537906 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-07-08T11:54:21.243239Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:2135} removed owner from chunks Keeper OwnerId# 3 PDiskId# 1 2025-07-08T11:54:21.243261Z node 1 :BS_PDISK NOTICE: {BPD12@blobstorage_pdisk_impl.cpp:2183} KillOwner ownerId# 3 ownerRound# 2 VDiskId# [0:_:0:0:0] lastSeenLsn# 0 PDiskId# 1 2025-07-08T11:54:21.243606Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 4 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1537906 CutLogId# [0:0:0] ownerRound# 3 PDiskId# 1 2025-07-08T11:54:21.243953Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:2135} removed owner from chunks Keeper OwnerId# 4 PDiskId# 1 2025-07-08T11:54:21.243961Z node 1 :BS_PDISK NOTICE: {BPD12@blobstorage_pdisk_impl.cpp:2183} KillOwner ownerId# 4 ownerRound# 3 VDiskId# [0:_:0:0:0] lastSeenLsn# 0 PDiskId# 1 2025-07-08T11:54:21.244301Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 5 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1537906 CutLogId# [0:0:0] ownerRound# 4 PDiskId# 1 2025-07-08T11:54:21.244633Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:2135} removed owner from chunks Keeper OwnerId# 5 PDiskId# 1 2025-07-08T11:54:21.244640Z node 1 :BS_PDISK NOTICE: {BPD12@blobstorage_pdisk_impl.cpp:2183} KillOwner ownerId# 5 ownerRound# 4 VDiskId# [0:_:0:0:0] lastSeenLsn# 0 PDiskId# 1 2025-07-08T11:54:21.244998Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 6 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1537906 CutLogId# [0:0:0] ownerRound# 5 PDiskId# 1 2025-07-08T11:54:21.245364Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:2135} removed owner from chunks Keeper OwnerId# 6 PDiskId# 1 2025-07-08T11:54:21.245378Z node 1 :BS_PDISK NOTICE: {BPD12@blobstorage_pdisk_impl.cpp:2183} KillOwner ownerId# 6 ownerRound# 5 VDiskId# [0:_:0:0:0] lastSeenLsn# 0 PDiskId# 1 2025-07-08T11:54:21.245639Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 7 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1537906 CutLogId# [0:0:0] ownerRound# 6 PDiskId# 1 2025-07-08T11:54:21.245937Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:2135} removed owner from chunks Keeper OwnerId# 7 PDiskId# 1 2025-07-08T11:54:21.245943Z node 1 :BS_PDISK NOTICE: {BPD12@blobstorage_pdisk_impl.cpp:2183} KillOwner ownerId# 7 
ownerRound# 6 VDiskId# [0:_:0:0:0] lastSeenLsn# 0 PDiskId# 1 2025-07-08T11:54:21.246237Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 8 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1537906 CutLogId# [0:0:0] ownerRound# 7 PDiskId# 1 2025-07-08T11:54:21.246588Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:2135} removed owner from chunks Keeper OwnerId# 8 PDiskId# 1 2025-07-08T11:54:21.246597Z node 1 :BS_PDISK NOTICE: {BPD12@blobstorage_pdisk_impl.cpp:2183} KillOwner ownerId# 8 ownerRound# 7 VDiskId# [0:_:0:0:0] lastSeenLsn# 0 PDiskId# 1 2025-07-08T11:54:21.246886Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 9 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1537906 CutLogId# [0:0:0] ownerRound# 8 PDiskId# 1 2025-07-08T11:54:21.247283Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:2135} removed owner from chunks Keeper OwnerId# 9 PDiskId# 1 2025-07-08T11:54:21.247303Z node 1 :BS_PDISK NOTICE: {BPD12@blobstorage_pdisk_impl.cpp:2183} KillOwner ownerId# 9 ownerRound# 8 VDiskId# [0:_:0:0:0] lastSeenLsn# 0 PDiskId# 1 2025-07-08T11:54:21.247717Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 10 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1537906 CutLogId# [0:0:0] ownerRound# 9 PDiskId# 1 2025-07-08T11:54:21.248198Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:2135} removed owner from chunks Keeper OwnerId# 10 PDiskId# 1 2025-07-08T11:54:21.248215Z node 1 :BS_PDISK NOTICE: {BPD12@blobstorage_pdisk_impl.cpp:2183} KillOwner ownerId# 10 ownerRound# 9 VDiskId# [0:_:0:0:0] lastSeenLsn# 0 PDiskId# 1 2025-07-08T11:54:21.248655Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 11 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1537906 CutLogId# [0:0:0] ownerRound# 10 PDiskId# 1 2025-07-08T11:54:21.249061Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:2135} removed owner from chunks Keeper OwnerId# 11 PDiskId# 1 2025-07-08T11:54:21.249073Z node 1 :BS_PDISK NOTICE: {BPD12@blobstorage_pdisk_impl.cpp:2183} KillOwner ownerId# 11 ownerRound# 10 VDiskId# [0:_:0:0:0] lastSeenLsn# 0 PDiskId# 1 2025-07-08T11:54:21.249398Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 12 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1537906 CutLogId# [0:0:0] ownerRound# 11 PDiskId# 1 2025-07-08T11:54:21.249729Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:2135} removed owner from chunks Keeper OwnerId# 12 PDiskId# 1 2025-07-08T11:54:21.249740Z node 1 :BS_PDISK NOTICE: {BPD12@blobstorage_pdisk_impl.cpp:2183} KillOwner ownerId# 12 ownerRound# 11 VDiskId# [0:_:0:0:0] lastSeenLsn# 0 PDiskId# 1 2025-07-08T11:54:21.250038Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 13 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1537906 CutLogId# [0:0:0] ownerRound# 12 PDiskId# 1 2025-07-08T11:54:21.250418Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:2135} removed owner from chunks Keeper OwnerId# 13 PDiskId# 1 2025-07-08T11:54:21.250428Z node 1 :BS_PDISK NOTICE: {BPD12@blobstorage_pdisk_impl.cpp:2183} KillOwner ownerId# 13 ownerRound# 12 VDiskId# [0:_:0:0:0] lastSeenLsn# 0 PDiskId# 1 2025-07-08T11:54:21.251053Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 14 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1537906 CutLogId# [0:0:0] ownerRound# 13 PDiskId# 1 2025-07-08T11:54:21.251686Z 
node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:2135} removed owner from chunks Keeper OwnerId# 14 PDiskId# 1 2025-07-08T11:54:21.251699Z node 1 :BS_PDISK NOTICE: {BPD12@blobstorage_pdisk_impl.cpp:2183} KillOwner ownerId# 14 ownerRound# 13 VDiskId# [0:_:0:0:0] lastSeenLsn# 0 PDiskId# 1 2025-07-08T11:54:21.252258Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 15 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1537906 CutLogId# [0:0:0] ownerRound# 14 PDiskId# 1 2025-07-08T11:54:21.252581Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:2135} removed owner from chunks Keeper OwnerId# 15 PDiskId# 1 2025-07-08T11:54:21.252590Z node 1 :BS_PDISK NOTICE: {BPD12@blobstorage_pdisk_impl.cpp:2183} KillOwner ownerId# 15 ownerRound# 14 VDiskId# [0:_:0:0:0] lastSeenLsn# 0 PDiskId# 1 2025-07-08T11:54:21.253046Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 16 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1537906 CutLogId# [0:0:0] ownerRound# 15 PDiskId# 1 2025-07-08T11:54:21.253460Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:2135} removed owner from chunks Keeper OwnerId# 16 PDiskId# 1 2025-07-08T11:54:21.253470Z node 1 :BS_PDISK NOTICE: {BPD12@blobstorage_pdisk_impl.cpp:2183} KillOwner ownerId# 16 ownerRound# 15 VDiskId# [0:_:0:0:0] lastSeenLsn# 0 PDiskId# 1 2025-07-08T11:54:21.253912Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 17 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1537906 CutLogId# [0:0:0] ownerRound# 16 PDiskId# 1 2025-07-08T11:54:21.254370Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:2135} removed owner from chunks Keeper OwnerId# 17 PDiskId# 1 2025-07-08T11:54:21.254380Z node 1 :BS_PDISK NOTICE: {BPD12@blobstorage_pdisk_impl.cpp:2183} KillOwner ownerId# 17 ownerRound# 16 VDiskId# [0:_:0:0:0] lastSeenLsn# 0 PDiskId# 1 2025-07-08T11:54:21.254812Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 18 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1537906 CutLogId# [0:0:0] ownerRound# 17 PDiskId# 1 2025-07-08T11:54:21.255565Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:2135} removed owner from chunks Keeper OwnerId# 18 PDiskId# 1 2025-07-08T11:54:21.255572Z node 1 :BS_PDISK NOTICE: {BPD12@blobstorage_pdisk_impl.cpp:2183} KillOwner ownerId# 18 ownerRound# 17 VDiskId# [0:_:0:0:0] lastSeenLsn# 0 PDiskId# 1 2025-07-08T11:54:21.255921Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 19 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1537906 CutLogId# [0:0:0] ownerRound# 18 PDiskId# 1 2025-07-08T11:54:21.256241Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:2135} removed owner from chunks Keeper OwnerId# 19 PDiskId# 1 2025-07-08T11:54:21.256249Z node 1 :BS_PDISK NOTICE: {BPD12@blobstorage_pdisk_impl.cpp:2183} KillOwner ownerId# 19 ownerRound# 18 VDiskId# [0:_:0:0:0] lastSeenLsn# 0 PDiskId# 1 2025-07-08T11:54:21.256729Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 20 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1537906 CutLogId# [0:0:0] ownerRound# 19 PDiskId# 1 2025-07-08T11:54:21.257020Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:2135} removed owner from chunks Keeper OwnerId# 20 PDiskId# 1 2025-07-08T11:54:21.257036Z node 1 :BS_PDISK NOTICE: {BPD12@blobstorage_pdisk_impl.cpp:2183} KillOwner ownerId# 20 ownerRound# 19 VDiskId# [0:_:0:0:0] lastSeenLsn# 0 PDiskId# 
1 2025-07-08T11:54:21.257241Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 21 vDiskId# [0:_:0:0:0] Fir ... 0} PDiskId# 1 2025-07-08T11:54:26.260313Z node 4 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:26.441291Z node 5 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:26.441771Z node 5 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 4110166856142050975 MagicNextLogChunkReference: 11911233327541122031 MagicLogChunk: 306398435602218254 MagicDataChunk: 8041432814890705929 MagicSysLogChunk: 15232050417187017850 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975666410954 (2025-07-08T11:54:26.410954Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:26.442993Z node 5 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:26.444923Z node 5 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:26.444969Z node 5 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:26.445400Z node 5 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:26.448041Z node 5 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1124891 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-07-08T11:54:26.480509Z node 6 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:26.480706Z node 6 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 17783187546827610986 MagicNextLogChunkReference: 3075598103577799376 MagicLogChunk: 17791473492562284586 MagicDataChunk: 10940768481120587992 MagicSysLogChunk: 3352258284149317174 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975666464195 (2025-07-08T11:54:26.464195Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 
2025-07-08T11:54:26.482004Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:26.483751Z node 6 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:26.483787Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:26.484078Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:26.484586Z node 6 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 2032413 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-07-08T11:54:26.485020Z node 6 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:299} Shutdown OwnerInfo# {{OwnerId: 3 VDiskId: [0:_:0:0:0] ChunkWrites: 0 ChunkReads: 0 LogWrites: 0 LogReader: 0 CurrentFirstLsnToKeep: 0 FirstNonceToKeep: 2032413 StartingPoints: {} Owned chunkIds: {}} PDisk system/log ChunkIds: {0, 1} Free ChunkIds: {2..982} PDiskId# 1 2025-07-08T11:54:26.486607Z node 6 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:1120} HandlePoison, PDiskThread stopped PDiskId# 1 2025-07-08T11:54:26.487683Z node 6 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:26.488299Z node 6 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 17783187546827610986 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-07-08T11:54:26.550067Z node 7 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:26.550337Z node 7 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 14910373905501416984 MagicNextLogChunkReference: 6778490571071859297 MagicLogChunk: 11717852321878349264 MagicDataChunk: 8629607840872337317 MagicSysLogChunk: 8920694932247645770 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975666524758 (2025-07-08T11:54:26.524758Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:26.557302Z node 7 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:26.561721Z node 7 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:26.561755Z node 7 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:26.562062Z node 7 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have 
successfully started PDiskId# 1 2025-07-08T11:54:26.565890Z node 7 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [2:_:0:0:0] FirstNonceToKeep# 1999177 CutLogId# [7:7524678190406905416:2050] ownerRound# 14 PDiskId# 1 2025-07-08T11:54:26.566303Z node 7 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 2025-07-08T11:54:26.567105Z node 7 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:299} Shutdown OwnerInfo# {{OwnerId: 3 VDiskId: [2:_:0:0:0] ChunkWrites: 0 ChunkReads: 0 LogWrites: 0 LogReader: 0 CurrentFirstLsnToKeep: 0 FirstNonceToKeep: 1999177 StartingPoints: {{TLogRecord Signature# First Data.Size()# 1 Lsn# 1}} Owned chunkIds: {2}} PDisk system/log ChunkIds: {0, 1} Free ChunkIds: {3..982} PDiskId# 1 2025-07-08T11:54:26.584422Z node 7 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:26.584851Z node 7 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 14910373905501416984 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 >> Validator::IntValidation |64.7%| [TA] $(B)/ydb/core/fq/libs/compute/common/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> Validator::IntValidation [GOOD] >> Validator::BoolValidation [GOOD] >> Validator::StringValidation [GOOD] >> Validator::IntArrayValidation [GOOD] >> Validator::MapValidation [GOOD] >> Validator::MultitypeNodeValidation [GOOD] >> Validator::OpaqueMaps [GOOD] >> Validator::Enums [GOOD] |64.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/common/ut/unittest >> TOutOfSpaceStateTests::TestGlobal [GOOD] |64.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/server_restart/public-sdk-cpp-tests-integration-server_restart |64.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/yaml_config/validator/ut/validator/unittest >> Validator::Enums [GOOD] |64.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/pgproxy/ut/ydb-core-pgproxy-ut >> TBsLocalRecovery::ChaoticWriteRestartHugeDecreased [GOOD] |64.6%| [TA] $(B)/ydb/core/base/ut_auth/test-results/unittest/{meta.json ... results_accumulator.log} |64.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part1/ydb-tests-fq-yt-kqp_yt_file-part1 |64.6%| [TA] $(B)/ydb/tests/library/test-results/flake8/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsLocalRecovery::ChaoticWriteRestartHugeDecreased [GOOD] Test command err: 2025-07-08T11:55:03.981059Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:2828} PDiskId# 1 ownerId# 3 invalid OwnerRound, got# 101 expected# 151 error in TLogWrite for ownerId# 3 ownerRound# 101 lsn# 16 PDiskId# 1 2025-07-08T11:55:04.702267Z :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:2828} PDiskId# 1 ownerId# 4 invalid OwnerRound, got# 101 expected# 151 error in TLogWrite for ownerId# 4 ownerRound# 101 lsn# 13 PDiskId# 1 |64.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp >> SharedThreads::RegistrationAndPassingAwayActorsLazy [GOOD] >> SharedThreads::RegistrationAndPassingAwayActorsTail |64.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |64.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/driver/libpy3nemesis.global.a >> TBtreeIndexTPartLarge::CutKeys [GOOD] >> TBtreeIndexTPartLarge::Group |64.2%| [LD] {BAZEL_DOWNLOAD} $(B)/library/recipes/docker_compose/docker_compose |64.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/transfer |64.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/runner/runner/runner >> Interconnect::SessionContinuation [GOOD] >> LargeMessage::Test |63.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans >> LargeMessage::Test [GOOD] >> OutgoingStream::Basic |63.7%| [UN] {default-linux-x86_64, relwithdebinfo} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/common-test_framework-udfs_deps.pkg.fake >> runner::import_test [GOOD] >> TPDiskRaces::KillOwnerWhileDeletingChunkWithInflightMock [GOOD] >> TPDiskRaces::DecommitWithInflightMock |63.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests >> ydb-tests-stress-olap_workload-tests::import_test [GOOD] |63.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/library/benchmarks/runner/runner/import_test >> runner::import_test [GOOD] |63.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |63.3%| RESOURCE $(sbr:4966407557) |63.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/olap_workload/tests/import_test >> ydb-tests-stress-olap_workload-tests::import_test [GOOD] >> ydb-tests-functional-rename::import_test [GOOD] 
>> ydb-tests-functional-api::import_test [GOOD] |63.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/structs/libstructs_udf.so |63.2%| [SB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/postgresql/psql/psql |63.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/callables/libcallables_udf.so |63.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |63.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/rename/import_test >> ydb-tests-functional-rename::import_test [GOOD] |63.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/import_test >> ydb-tests-functional-api::import_test [GOOD] |63.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |63.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.so |63.2%| [UN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/postgresql/psql/psql |63.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/type_inspection/libtype_inspection_udf.so |63.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.so |63.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dicts/libdicts_udf.so |63.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/lists/liblists_udf.so >> ydb-tests-functional-blobstorage::import_test [GOOD] |63.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/vector/libvector_udf.so |63.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.so |63.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dummylog/libdummylog.so |63.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.so |63.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/simple/libsimple_udf.so |63.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/import_test >> ydb-tests-functional-blobstorage::import_test [GOOD] |63.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |63.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/benchmarks_init/ydb-tests-functional-benchmarks_init |63.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.so |63.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/ydb-tests-sql |63.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/ut/ydb-core-base-generated-ut |63.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.so >> OutgoingStream::Basic [GOOD] >> TPollerActorTest::Registration [GOOD] >> TPollerActorTest::ReadNotification [GOOD] >> TPollerActorTest::WriteNotification |63.0%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/config/validation/auth_config_validator_ut/core-config-validation-auth_config_validator_ut |63.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.so |63.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.so |62.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part9/ydb-tests-fq-yt-kqp_yt_file-part9 |63.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.so |63.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig >> ydb-tests-stress-oltp_workload-tests::import_test [GOOD] >> test_init.py::TestTpchInit::test_s1_column_decimal |62.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/erasure/ut/ydb-core-erasure-ut >> test_init.py::TestTpcdsInit::test_s1_column_decimal >> 
test_generator.py::TestTpchGenerator::test_s1_parts >> test_init.py::TestTpchInit::test_s1_column_decimal [GOOD] >> test_init.py::TestTpchInit::test_s1_s3 >> test_init.py::TestTpcdsInit::test_s1_column_decimal [GOOD] >> test_generator.py::TestTpcdsGenerator::test_s1_state >> test_init.py::TestTpcdsInit::test_s1_column_decimal_ydb |62.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.so |62.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.so >> test_init.py::TestTpchInit::test_s1_row >> test_init.py::TestTpchInit::test_s1_s3 [GOOD] >> test_generator.py::TestTpchGenerator::test_s1_state_and_parts |62.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/oltp_workload/tests/import_test >> ydb-tests-stress-oltp_workload-tests::import_test [GOOD] >> test_init.py::TestTpcdsInit::test_s1_column_decimal_ydb [GOOD] >> test_init.py::TestTpchInit::test_s1_row [GOOD] >> test_generator.py::TestTpcdsGenerator::test_s1_parts >> test_init.py::TestClickbenchInit::test_s1_column >> test_init.py::TestTpchInit::test_s1_column >> test_init.py::TestClickbenchInit::test_s1_s3 >> TPollerActorTest::WriteNotification [GOOD] >> test_init.py::TestTpchInit::test_s1_column_decimal_ydb |62.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestClickbenchInit::test_s1_column [GOOD] >> TPollerActorTest::HangupNotification >> test_init.py::TestTpchInit::test_s1_column [GOOD] >> test_init.py::TestClickbenchInit::test_s1_s3 [GOOD] >> test_init.py::TestTpcdsInit::test_s1_s3 >> test_init.py::TestClickbenchInit::test_s1_row >> test_init.py::TestTpchInit::test_s1_column_decimal_ydb [GOOD] >> TPollerActorTest::HangupNotification [GOOD] >> Sticking::Check >> test_init.py::TestTpcdsInit::test_s1_row >> test_init.py::TestClickbenchInit::test_s1_row [GOOD] >> test_init.py::TestTpcdsInit::test_s1_column |62.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpchInit::test_s1_column_decimal [GOOD] >> test_init.py::TestTpcdsInit::test_s1_s3 [GOOD] >> test_init.py::TestTpcdsInit::test_s1_row [GOOD] |62.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.so |62.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/base/ut_auth/ydb-core-base-ut_auth >> test_init.py::TestTpcdsInit::test_s1_column [GOOD] |62.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpcdsInit::test_s1_column_decimal_ydb [GOOD] |62.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpchInit::test_s1_row [GOOD] |62.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpchInit::test_s1_column [GOOD] |62.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpchInit::test_s1_s3 [GOOD] |62.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpcdsInit::test_s1_s3 [GOOD] |62.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpchInit::test_s1_column_decimal_ydb [GOOD] |62.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpcdsInit::test_s1_column [GOOD] >> TErasureTypeTest::TestMirror3LossOfAllPossible3 >> 
TErasureTypeTest::TestMirror3LossOfAllPossible3 [GOOD] |62.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestClickbenchInit::test_s1_row [GOOD] |62.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_init.py::TestTpcdsInit::test_s1_row [GOOD] >> TErasureTypeTest::TestBlock42PartialRestore0 >> TErasureTypeTest::TestBlock43LossOfAllPossible3 |62.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/large_serializable/ydb-tests-functional-large_serializable >> TErasureTypeTest::TestBlock31LossOfAllPossible1 |62.4%| [TA] $(B)/ydb/core/blobstorage/vdisk/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TErasureTypeTest::TestStripe42LossOfAllPossible2 >> TErasureTypeTest::TestBlockByteOrder >> TErasureTypeTest::TestBlock31LossOfAllPossible1 [GOOD] |62.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestMirror3LossOfAllPossible3 [GOOD] >> TErasureTypeTest::TestBlockByteOrder [GOOD] >> TErasureTypeTest::TestStripe23LossOfAllPossible3 >> TErasureTypeTest::TestStripe42LossOfAllPossible2 [GOOD] >> TErasureTypeTest::TestBlock33LossOfAllPossible3 >> TErasureTypeTest::TestDifferentCasesInDiffSplitingBlock4Plus2 [GOOD] >> ydb-tests-fq-plans::import_test [GOOD] >> TErasureTypeTest::isSplittedDataEqualsToOldVerion >> TErasureTypeTest::TestBlock42LossOfAllPossible2 |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock31LossOfAllPossible1 [GOOD] >> TErasureTypeTest::isSplittedDataEqualsToOldVerion [GOOD] |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlockByteOrder [GOOD] >> TErasureTypeTest::TestStripe23LossOfAllPossible3 [GOOD] |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe42LossOfAllPossible2 [GOOD] |62.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/plans/import_test >> ydb-tests-fq-plans::import_test [GOOD] >> TErasureTypeTest::TestBlock42PartialRestore2 |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestDifferentCasesInDiffSplitingBlock4Plus2 [GOOD] >> TErasureTypeTest::TestBlock42LossOfAllPossible2 [GOOD] >> TErasureTypeTest::TestBlock33LossOfAllPossible3 [GOOD] >> TErasureTypeTest::TestEo [GOOD] >> TErasureTypeTest::TestBlock43LossOfAllPossible3 [GOOD] >> TErasureTypeTest::TestBlock42PartialRestore3 >> TErasureTypeTest::TestStripe22LossOfAllPossible2 >> TErasureTypeTest::TestBlock32LossOfAllPossible2 |62.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe23LossOfAllPossible3 [GOOD] |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::isSplittedDataEqualsToOldVerion [GOOD] |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock42LossOfAllPossible2 [GOOD] >> ydb-core-viewer-tests::import_test [GOOD] |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestEo [GOOD] |62.2%| RESOURCE $(sbr:770480022) |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock43LossOfAllPossible3 [GOOD] >> TErasureTypeTest::TestStripe22LossOfAllPossible2 [GOOD] >> TErasureTypeTest::TestStripe31LossOfAllPossible1 >> 
TErasureTypeTest::TestBlock32LossOfAllPossible2 [GOOD] >> TErasureTypeTest::TestStripe31LossOfAllPossible1 [GOOD] |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock33LossOfAllPossible3 [GOOD] |62.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive >> TErasureTypeTest::TestBlock23LossOfAllPossible3 |62.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/log_backend/ut/ydb-core-log_backend-ut |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe22LossOfAllPossible2 [GOOD] |62.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/tests/import_test >> ydb-core-viewer-tests::import_test [GOOD] |62.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/mvp/core/ut/ydb-mvp-core-ut |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock32LossOfAllPossible2 [GOOD] |62.2%| [SB] {default-linux-x86_64, relwithdebinfo} $(B)/library/recipes/docker_compose/bin/docker-compose |62.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe31LossOfAllPossible1 [GOOD] >> TErasureTypeTest::TestBlock23LossOfAllPossible3 [GOOD] >> TErasureTypeTest::TestAllSpeciesCrcWhole1of2 >> TErasureTypeTest::TestAllSpecies1of2 >> TErasureTypeTest::TestStripe33LossOfAllPossible3 >> TErasureTypeTest::TestAllSpeciesCrcWhole2of2 >> ydb-tests-functional-wardens::import_test [GOOD] |62.1%| [UN] {default-linux-x86_64, relwithdebinfo} $(B)/library/recipes/docker_compose/bin/docker-compose |62.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock23LossOfAllPossible3 [GOOD] |62.1%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yql/providers/dq/provider/ut/ydb-library-yql-providers-dq-provider-ut >> TErasureTypeTest::TestBlock22LossOfAllPossible2 >> TErasureTypeTest::TestBlock42PartialRestore1 >> TErasureTypeTest::TestBlock22LossOfAllPossible2 [GOOD] >> ErasureBrandNew::Block42_encode |62.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part7/ydb-tests-fq-yt-kqp_yt_file-part7 >> TErasureTypeTest::TestDifferentCasesInDiffSplitingMirror3Of4 [GOOD] >> TErasureTypeTest::TestStripe43LossOfAllPossible3 >> ydb-tests-functional-sqs-large::import_test [GOOD] >> TErasureTypeTest::TestSplitDiffBlock4Plus2SpecialCase1 [GOOD] >> ErasureBrandNew::Block42_restore |62.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/wardens/import_test >> ydb-tests-functional-wardens::import_test [GOOD] >> TErasureTypeTest::TestStripe32LossOfAllPossible2 |62.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock22LossOfAllPossible2 [GOOD] |62.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts >> TErasureTypeTest::TestStripe33LossOfAllPossible3 [GOOD] |62.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestDifferentCasesInDiffSplitingMirror3Of4 [GOOD] |62.0%| [PK] {default-linux-x86_64, relwithdebinfo} $(B)/library/recipes/docker_compose/bin/{recipes-docker_compose-bin.final.pkg.fake ... 
library/recipes/docker_compose/bin/docker-compose} >> ydb-tests-functional-sqs-common::import_test [GOOD] >> TErasureTypeTest::TestStripe32LossOfAllPossible2 [GOOD] |62.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestSplitDiffBlock4Plus2SpecialCase1 [GOOD] |62.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |62.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/large/import_test >> ydb-tests-functional-sqs-large::import_test [GOOD] |62.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode >> RuntimeFeatureFlags::ConversionToProto [GOOD] >> RuntimeFeatureFlags::ConversionFromProto [GOOD] >> RuntimeFeatureFlags::UpdatingRuntimeFlags [GOOD] >> RuntimeFeatureFlags::DefaultValues [GOOD] |62.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/codecs/ut/ydb-core-persqueue-codecs-ut |62.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe33LossOfAllPossible3 [GOOD] |62.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe32LossOfAllPossible2 [GOOD] >> functional-sqs-merge_split_common_table-std::import_test [GOOD] >> ydb-tests-functional-benchmarks_init::import_test [GOOD] |61.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/import_test >> ydb-tests-functional-sqs-common::import_test [GOOD] |61.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yql/dq/actors/spilling/ut/ydb-library-yql-dq-actors-spilling-ut >> TErasureTypeTest::TestStripe43LossOfAllPossible3 [GOOD] |61.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/merge_split_common_table/std/import_test >> functional-sqs-merge_split_common_table-std::import_test [GOOD] |61.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/base/generated/ut/unittest >> RuntimeFeatureFlags::DefaultValues [GOOD] |61.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/import_test >> ydb-tests-functional-benchmarks_init::import_test [GOOD] |61.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so |61.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/jaeger_tracing/ut/ydb-core-jaeger_tracing-ut |61.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestStripe43LossOfAllPossible3 [GOOD] |61.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |61.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/tools/protobuf_plugin/ut/ydb-core-config-tools-protobuf_plugin-ut >> TIncrHugeBasicTest::Defrag [GOOD] >> test_generator.py::TestTpcdsGenerator::test_s1_state_and_parts >> PersQueueCodecs::FromV1Codec [GOOD] |61.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/codecs/ut/unittest |61.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/ydb-tests-functional-backup |61.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/codecs/ut/unittest |61.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/codecs/ut/unittest |61.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/codecs/ut/unittest >> test_generator.py::TestTpcdsGenerator::test_s1 >> test_generator.py::TestTpchGenerator::test_s1 |61.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part2/ydb-library-yql-tests-sql-dq_file-part2 >> test_generator.py::TestTpchGenerator::test_s1_state |61.5%| [TS] 
{default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/codecs/ut/unittest >> ErasureBrandNew::Block42_encode [GOOD] >> ErasureBrandNew::Block42_chunked >> PersQueueCodecs::ToV1Codec [GOOD] |61.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stability/ydb/ydb-tests-stability-ydb |61.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/codecs/ut/unittest >> PersQueueCodecs::FromV1Codec [GOOD] |61.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part17/ydb-library-yql-tests-sql-dq_file-part17 |61.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> SamplingControlTests::EdgeCaseLower [GOOD] |61.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/codecs/ut/unittest |61.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |61.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/codecs/ut/unittest |61.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc >> ThrottlerControlTests::Simple [GOOD] >> Sticking::Check [GOOD] |61.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/codecs/ut/unittest >> PersQueueCodecs::ToV1Codec [GOOD] |61.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/codecs/ut/unittest |61.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part10/ydb-tests-fq-yt-kqp_yt_file-part10 |61.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> SamplingControlTests::EdgeCaseLower [GOOD] >> ThrottlerControlTests::LongIdle [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/incrhuge/ut/unittest >> TIncrHugeBasicTest::Defrag [GOOD] Test command err: 2025-07-08T11:54:22.535845Z :BS_INCRHUGE DEBUG: BlockSize# 8128 BlocksInChunk# 2304 BlocksInMinBlob# 65 MaxBlobsPerChunk# 35 BlocksInDataSection# 2303 BlocksInIndexSection# 1 2025-07-08T11:54:22.535884Z :BS_INCRHUGE INFO: [PDisk# 000000001 Recovery] [IncrHugeKeeper PDisk# 000000001] starting ReadLog 2025-07-08T11:54:22.536053Z :BS_INCRHUGE INFO: [PDisk# 000000001 Recovery] [IncrHugeKeeper PDisk# 000000001] finished ReadLog 2025-07-08T11:54:22.536066Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Recovery] ApplyReadLog Chunks# [] Deletes# [] Owners# {} CurrentSerNum# 0 NextLsn# 1 2025-07-08T11:54:22.536098Z :TEST DEBUG: finished Init Reference# [] Enumerated# [] InFlightDeletes# [] 2025-07-08T11:54:22.536107Z :TEST DEBUG: ActionsTaken# 1 2025-07-08T11:54:22.536109Z :TEST DEBUG: GetNumRequestsInFlight# 0 InFlightWritesSize# 0 2025-07-08T11:54:22.536130Z :BS_INCRHUGE INFO: [PDisk# 000000001 Recovery] [IncrHugeKeeper PDisk# 000000001] ready 2025-07-08T11:54:22.543524Z :TEST DEBUG: sent Write LogoBlobId# [1:1:1:0:811717:0:0] Lsn# 0 NumReq# 0 2025-07-08T11:54:22.543993Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 0 HandleWrite Lsn# 0 DataSize# 811717 WriteQueueSize# 1 WriteInProgressItemsSize# 0 2025-07-08T11:54:22.544001Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 1 WriteInProgressItemsSize# 0 2025-07-08T11:54:22.544005Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 0 ProcessWriteItem entry 2025-07-08T11:54:22.544008Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 0 ProcessWriteItem no free chunks 2025-07-08T11:54:22.549871Z :TEST DEBUG: GetNumRequestsInFlight# 1 InFlightWritesSize# 1 2025-07-08T11:54:22.551999Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] ApplyLogChunkItem Lsn# 1 Status# OK 2025-07-08T11:54:22.552013Z :BS_INCRHUGE DEBUG: [PDisk# 
000000001 Allocator] ChunkIdx# 2 ChunkSerNum# 1000 2025-07-08T11:54:22.552017Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Allocator] ChunkIdx# 3 ChunkSerNum# 1001 2025-07-08T11:54:22.552018Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Allocator] ChunkIdx# 4 ChunkSerNum# 1002 2025-07-08T11:54:22.552021Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Allocator] ChunkIdx# 5 ChunkSerNum# 1003 2025-07-08T11:54:22.552022Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Allocator] ChunkIdx# 6 ChunkSerNum# 1004 2025-07-08T11:54:22.552023Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Allocator] ChunkIdx# 7 ChunkSerNum# 1005 2025-07-08T11:54:22.552024Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Allocator] ChunkIdx# 8 ChunkSerNum# 1006 2025-07-08T11:54:22.552026Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Allocator] ChunkIdx# 9 ChunkSerNum# 1007 2025-07-08T11:54:22.552029Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 1 WriteInProgressItemsSize# 0 2025-07-08T11:54:22.552032Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 0 ProcessWriteItem entry 2025-07-08T11:54:22.552376Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 0 ProcessWriteItem OffsetInBlocks# 0 IndexInsideChunk# 0 SizeInBlocks# 100 SizeInBytes# 812800 Offset# 0 Size# 812800 End# 812800 Id# 0000000000000000 ChunkIdx# 2 ChunkSerNum# 1000 Defrag# false 2025-07-08T11:54:22.555672Z :TEST DEBUG: sent Write LogoBlobId# [1:1:1:0:1745495:1:0] Lsn# 1 NumReq# 1 2025-07-08T11:54:22.555678Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 1 HandleWrite Lsn# 1 DataSize# 1745495 WriteQueueSize# 1 WriteInProgressItemsSize# 1 2025-07-08T11:54:22.555682Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 1 WriteInProgressItemsSize# 1 2025-07-08T11:54:22.555683Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 1 ProcessWriteItem entry 2025-07-08T11:54:22.555892Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 1 ProcessWriteItem OffsetInBlocks# 100 IndexInsideChunk# 1 SizeInBlocks# 215 SizeInBytes# 1747520 Offset# 812800 Size# 1747520 End# 2560320 Id# 0000000000000001 ChunkIdx# 2 ChunkSerNum# 1000 Defrag# false 2025-07-08T11:54:22.558924Z :TEST DEBUG: GetNumRequestsInFlight# 2 InFlightWritesSize# 2 2025-07-08T11:54:22.563944Z :TEST DEBUG: sent Write LogoBlobId# [1:1:1:0:602037:2:0] Lsn# 2 NumReq# 2 2025-07-08T11:54:22.564659Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 2 HandleWrite Lsn# 2 DataSize# 602037 WriteQueueSize# 1 WriteInProgressItemsSize# 2 2025-07-08T11:54:22.564661Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 1 WriteInProgressItemsSize# 2 2025-07-08T11:54:22.564664Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 2 ProcessWriteItem entry 2025-07-08T11:54:22.564731Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 2 ProcessWriteItem OffsetInBlocks# 315 IndexInsideChunk# 2 SizeInBlocks# 75 SizeInBytes# 609600 Offset# 2560320 Size# 609600 End# 3169920 Id# 0000000000000002 ChunkIdx# 2 ChunkSerNum# 1000 Defrag# false 2025-07-08T11:54:22.565331Z :TEST DEBUG: GetNumRequestsInFlight# 3 InFlightWritesSize# 3 2025-07-08T11:54:22.575524Z :TEST DEBUG: sent Write LogoBlobId# [1:1:1:0:1287465:3:0] Lsn# 3 NumReq# 3 2025-07-08T11:54:22.575600Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 3 HandleWrite Lsn# 3 DataSize# 1287465 WriteQueueSize# 1 WriteInProgressItemsSize# 3 2025-07-08T11:54:22.575605Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 1 WriteInProgressItemsSize# 3 2025-07-08T11:54:22.575608Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 3 ProcessWriteItem entry 
2025-07-08T11:54:22.575743Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 3 ProcessWriteItem OffsetInBlocks# 390 IndexInsideChunk# 3 SizeInBlocks# 159 SizeInBytes# 1292352 Offset# 3169920 Size# 1292352 End# 4462272 Id# 0000000000000003 ChunkIdx# 2 ChunkSerNum# 1000 Defrag# false 2025-07-08T11:54:22.580824Z :TEST DEBUG: GetNumRequestsInFlight# 4 InFlightWritesSize# 4 2025-07-08T11:54:22.581068Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 0 ApplyBlobWrite Status# OK 2025-07-08T11:54:22.581130Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 0 WriteInProgressItemsSize# 3 2025-07-08T11:54:22.581141Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Defragmenter] overall efficiency 0.030 2025-07-08T11:54:22.582866Z :TEST DEBUG: sent Write LogoBlobId# [1:1:1:0:1501676:4:0] Lsn# 4 NumReq# 4 2025-07-08T11:54:22.584796Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 4 HandleWrite Lsn# 4 DataSize# 1501676 WriteQueueSize# 1 WriteInProgressItemsSize# 3 2025-07-08T11:54:22.584804Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 1 WriteInProgressItemsSize# 3 2025-07-08T11:54:22.584808Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 4 ProcessWriteItem entry 2025-07-08T11:54:22.584993Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 4 ProcessWriteItem OffsetInBlocks# 549 IndexInsideChunk# 4 SizeInBlocks# 185 SizeInBytes# 1503680 Offset# 4462272 Size# 1503680 End# 5965952 Id# 0000000000000004 ChunkIdx# 2 ChunkSerNum# 1000 Defrag# false 2025-07-08T11:54:22.588852Z :TEST DEBUG: GetNumRequestsInFlight# 5 InFlightWritesSize# 5 2025-07-08T11:54:22.589030Z :TEST DEBUG: sent Write LogoBlobId# [1:1:1:0:687721:5:0] Lsn# 5 NumReq# 5 2025-07-08T11:54:22.589046Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 5 HandleWrite Lsn# 5 DataSize# 687721 WriteQueueSize# 1 WriteInProgressItemsSize# 4 2025-07-08T11:54:22.589049Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 1 WriteInProgressItemsSize# 4 2025-07-08T11:54:22.589052Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 5 ProcessWriteItem entry 2025-07-08T11:54:22.589165Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 5 ProcessWriteItem OffsetInBlocks# 734 IndexInsideChunk# 5 SizeInBlocks# 85 SizeInBytes# 690880 Offset# 5965952 Size# 690880 End# 6656832 Id# 0000000000000005 ChunkIdx# 2 ChunkSerNum# 1000 Defrag# false 2025-07-08T11:54:22.590312Z :TEST DEBUG: GetNumRequestsInFlight# 6 InFlightWritesSize# 6 2025-07-08T11:54:22.602072Z :TEST DEBUG: sent Write LogoBlobId# [1:1:1:0:1957662:6:0] Lsn# 6 NumReq# 6 2025-07-08T11:54:22.604995Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 6 HandleWrite Lsn# 6 DataSize# 1957662 WriteQueueSize# 1 WriteInProgressItemsSize# 5 2025-07-08T11:54:22.604999Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 1 WriteInProgressItemsSize# 5 2025-07-08T11:54:22.605880Z :TEST DEBUG: GetNumRequestsInFlight# 7 InFlightWritesSize# 7 2025-07-08T11:54:22.608376Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 1 ApplyBlobWrite Status# OK 2025-07-08T11:54:22.608394Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 1 WriteInProgressItemsSize# 4 2025-07-08T11:54:22.608398Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 6 ProcessWriteItem entry 2025-07-08T11:54:22.608587Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 6 ProcessWriteItem OffsetInBlocks# 819 IndexInsideChunk# 6 SizeInBlocks# 241 SizeInBytes# 1958848 Offset# 6656832 Size# 1958848 End# 8615680 Id# 0000000000000006 ChunkIdx# 2 ChunkSerNum# 
1000 Defrag# false 2025-07-08T11:54:22.613099Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 2 ApplyBlobWrite Status# OK 2025-07-08T11:54:22.613114Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 0 WriteInProgressItemsSize# 4 2025-07-08T11:54:22.625585Z :TEST DEBUG: sent Write LogoBlobId# [1:1:1:0:1824284:7:0] Lsn# 7 NumReq# 7 2025-07-08T11:54:22.628998Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 7 HandleWrite Lsn# 7 DataSize# 1824284 WriteQueueSize# 1 WriteInProgressItemsSize# 4 2025-07-08T11:54:22.629003Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 1 WriteInProgressItemsSize# 4 2025-07-08T11:54:22.629007Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 7 ProcessWriteItem entry 2025-07-08T11:54:22.629204Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 7 ProcessWriteItem OffsetInBlocks# 1060 IndexInsideChunk# 7 SizeInBlocks# 225 SizeInBytes# 1828800 Offset# 8615680 Size# 1828800 End# 10444480 Id# 0000000000000007 ChunkIdx# 2 ChunkSerNum# 1000 Defrag# false 2025-07-08T11:54:22.629217Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 3 ApplyBlobWrite Status# OK 2025-07-08T11:54:22.629232Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 0 WriteInProgressItemsSize# 4 2025-07-08T11:54:22.629519Z :TEST DEBUG: finished Write Id# 0000000000000000 LogoBlobId# [1:1:1:0:811717:0:0] Lsn# 0 2025-07-08T11:54:22.629529Z :TEST INFO: BytesWritten# 0 MB ElapsedTime# 0.165597s Speed# 0.00 MB/s 2025-07-08T11:54:22.629532Z :TEST DEBUG: ActionsTaken# 2 2025-07-08T11:54:22.629534Z :TEST DEBUG: GetNumRequestsInFlight# 7 InFlightWritesSize# 7 2025-07-08T11:54:22.629539Z :TEST DEBUG: sent Delete Id# 0000000000000000 NumReq# 7 2025-07-08T11:54:22.629544Z :TEST DEBUG: finished Write Id# 0000000000000001 LogoBlobId# [1:1:1:0:1745495:1:0] Lsn# 1 2025-07-08T11:54:22.629549Z :TEST INFO: BytesWritten# 0 MB ElapsedTime# 0.165620s Speed# 0.00 MB/s 2025-07-08T11:54:22.629550Z :TEST DEBUG: ActionsTaken# 3 2025-07-08T11:54:22.629551Z :TEST DEBUG: GetNumRequestsInFlight# 7 InFlightWritesSize# 6 2025-07-08T11:54:22.630641Z :TEST DEBUG: sent Write LogoBlobId# [1:1:1:0:1818240:9:0] Lsn# 9 NumReq# 7 2025-07-08T11:54:22.632985Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 8 HandleDelete Ids# [0000000000000000] 2025-07-08T11:54:22.632999Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] LogBlobDeletes ChunkIdx# 2 ChunkSerNum# 1000 Id# 0000000000000000 IndexInsideChunk# 0 SizeInBlocks# 100 Lsn# 2 Owner# 1 SeqNo# 8 2025-07-08T11:54:22.633004Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 2 Entrypoint# false Virtual# false 2025-07-08T11:54:22.633034Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 8 HandleWrite Lsn# 9 DataSize# 1818240 WriteQueueSize# 1 WriteInProgressItemsSize# 4 2025-07-08T11:54:22.633037Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 1 WriteInProgressItemsSize# 4 2025-07-08T11:54:22.633040Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 8 ProcessWriteItem entry 2025-07-08T11:54:22.633189Z :BS_IN ... 
PDisk# 000000001 Deleter] Owner# 1 SeqNo# 6074 finished Status# OK 2025-07-08T11:55:44.301122Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Deleter] deleting 000000000000000b from lookup table 2025-07-08T11:55:44.301125Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 3708 Status# OK 2025-07-08T11:55:44.301127Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] ApplyLogDeleteItem Entrypoint# false Lsn# 3708 Virtual# false 2025-07-08T11:55:44.301129Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 6076 finished Status# OK 2025-07-08T11:55:44.301131Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Deleter] deleting 0000000000000047 from lookup table 2025-07-08T11:55:44.301135Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 3709 Status# OK 2025-07-08T11:55:44.301137Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] ApplyLogDeleteItem Entrypoint# false Lsn# 3709 Virtual# false 2025-07-08T11:55:44.301139Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 6077 finished Status# OK 2025-07-08T11:55:44.301141Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Deleter] deleting 0000000000000046 from lookup table 2025-07-08T11:55:44.301143Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 3710 Status# OK 2025-07-08T11:55:44.301145Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] ApplyLogDeleteItem Entrypoint# false Lsn# 3710 Virtual# false 2025-07-08T11:55:44.301148Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 6078 finished Status# OK 2025-07-08T11:55:44.301149Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Deleter] deleting 000000000000007c from lookup table 2025-07-08T11:55:44.303599Z :TEST DEBUG: GetNumRequestsInFlight# 41 InFlightWritesSize# 18 2025-07-08T11:55:44.303623Z :TEST DEBUG: sent Delete Id# 0000000000000028 NumReq# 41 2025-07-08T11:55:44.303626Z :TEST DEBUG: GetNumRequestsInFlight# 42 InFlightWritesSize# 18 2025-07-08T11:55:44.304995Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 6089 HandleDelete Ids# [0000000000000028] 2025-07-08T11:55:44.305006Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] LogBlobDeletes ChunkIdx# 42 ChunkSerNum# 1424 Id# 0000000000000028 IndexInsideChunk# 2 SizeInBlocks# 159 Lsn# 3716 Owner# 1 SeqNo# 6089 2025-07-08T11:55:44.305011Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 3716 Entrypoint# false Virtual# false 2025-07-08T11:55:44.305051Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 2365 ApplyBlobWrite Status# OK 2025-07-08T11:55:44.305075Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 10 WriteInProgressItemsSize# 4 2025-07-08T11:55:44.305077Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 2370 ProcessWriteItem entry 2025-07-08T11:55:44.305349Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 2370 ProcessWriteItem OffsetInBlocks# 518 IndexInsideChunk# 4 SizeInBlocks# 238 SizeInBytes# 1934464 Offset# 4210304 Size# 1934464 End# 6144768 Id# 000000000000007c ChunkIdx# 57 ChunkSerNum# 1439 Defrag# false 2025-07-08T11:55:44.305764Z :TEST DEBUG: sent Write LogoBlobId# [1:2:1:0:1960159:6090:0] Lsn# 6090 NumReq# 42 2025-07-08T11:55:44.308990Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 2380 HandleWrite Lsn# 6090 DataSize# 1960159 WriteQueueSize# 10 WriteInProgressItemsSize# 5 2025-07-08T11:55:44.309005Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 10 WriteInProgressItemsSize# 5 2025-07-08T11:55:44.309015Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 3711 
Status# OK 2025-07-08T11:55:44.309018Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] ApplyLogDeleteItem Entrypoint# false Lsn# 3711 Virtual# false 2025-07-08T11:55:44.309024Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 6081 finished Status# OK 2025-07-08T11:55:44.309028Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Deleter] deleting 0000000000000050 from lookup table 2025-07-08T11:55:44.309034Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 3712 Status# OK 2025-07-08T11:55:44.309050Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] ApplyLogDeleteItem Entrypoint# false Lsn# 3712 Virtual# false 2025-07-08T11:55:44.309052Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 6082 finished Status# OK 2025-07-08T11:55:44.309054Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Deleter] deleting 000000000000005c from lookup table 2025-07-08T11:55:44.309057Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 3713 Status# OK 2025-07-08T11:55:44.309059Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] ApplyLogDeleteItem Entrypoint# false Lsn# 3713 Virtual# false 2025-07-08T11:55:44.309061Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 6083 finished Status# OK 2025-07-08T11:55:44.309063Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Deleter] deleting 000000000000000a from lookup table 2025-07-08T11:55:44.309066Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 3714 Status# OK 2025-07-08T11:55:44.309068Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] ApplyLogDeleteItem Entrypoint# false Lsn# 3714 Virtual# false 2025-07-08T11:55:44.309071Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 6084 finished Status# OK 2025-07-08T11:55:44.309073Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Deleter] deleting 000000000000001c from lookup table 2025-07-08T11:55:44.309076Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Deleter] sending chunk delete ChunkIdx# 46 2025-07-08T11:55:44.309080Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 3715 Status# OK 2025-07-08T11:55:44.309084Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] ApplyLogDeleteItem Entrypoint# false Lsn# 3715 Virtual# false 2025-07-08T11:55:44.309087Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 6085 finished Status# OK 2025-07-08T11:55:44.309089Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Deleter] deleting 000000000000004f from lookup table 2025-07-08T11:55:44.310367Z :TEST DEBUG: GetNumRequestsInFlight# 43 InFlightWritesSize# 19 2025-07-08T11:55:44.310646Z :TEST DEBUG: sent Write LogoBlobId# [1:2:1:0:1374538:6091:0] Lsn# 6091 NumReq# 43 2025-07-08T11:55:44.312996Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 2381 HandleWrite Lsn# 6091 DataSize# 1374538 WriteQueueSize# 11 WriteInProgressItemsSize# 5 2025-07-08T11:55:44.313014Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 11 WriteInProgressItemsSize# 5 2025-07-08T11:55:44.313025Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 2366 ApplyBlobWrite Status# OK 2025-07-08T11:55:44.313053Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 11 WriteInProgressItemsSize# 4 2025-07-08T11:55:44.313055Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 2371 ProcessWriteItem entry 2025-07-08T11:55:44.313162Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 2371 ProcessWriteItem OffsetInBlocks# 756 IndexInsideChunk# 5 SizeInBlocks# 78 SizeInBytes# 633984 Offset# 6144768 Size# 633984 End# 6778752 Id# 000000000000004f ChunkIdx# 57 
ChunkSerNum# 1439 Defrag# false 2025-07-08T11:55:44.313171Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 3716 Status# OK 2025-07-08T11:55:44.313174Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] ApplyLogDeleteItem Entrypoint# false Lsn# 3716 Virtual# false 2025-07-08T11:55:44.313181Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 6089 finished Status# OK 2025-07-08T11:55:44.313183Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Deleter] deleting 0000000000000028 from lookup table 2025-07-08T11:55:44.313191Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] ApplyLogChunkItem Lsn# 3717 Status# OK 2025-07-08T11:55:44.313247Z :TEST DEBUG: GetNumRequestsInFlight# 44 InFlightWritesSize# 20 2025-07-08T11:55:44.313354Z :TEST DEBUG: sent Write LogoBlobId# [1:2:1:0:630823:6092:0] Lsn# 6092 NumReq# 44 2025-07-08T11:55:44.314544Z :TEST DEBUG: GetNumRequestsInFlight# 45 InFlightWritesSize# 21 2025-07-08T11:55:44.314695Z :TEST DEBUG: sent Write LogoBlobId# [1:2:1:0:953725:6093:0] Lsn# 6093 NumReq# 45 2025-07-08T11:55:44.316488Z :TEST DEBUG: GetNumRequestsInFlight# 46 InFlightWritesSize# 22 2025-07-08T11:55:44.316505Z :TEST DEBUG: sent Delete Id# 0000000000000015 NumReq# 46 2025-07-08T11:55:44.316508Z :TEST DEBUG: GetNumRequestsInFlight# 47 InFlightWritesSize# 22 2025-07-08T11:55:44.316512Z :TEST DEBUG: sent Delete Id# 0000000000000035 NumReq# 47 2025-07-08T11:55:44.316514Z :TEST DEBUG: GetNumRequestsInFlight# 48 InFlightWritesSize# 22 2025-07-08T11:55:44.316522Z :TEST DEBUG: sent Delete Id# 0000000000000082 NumReq# 48 2025-07-08T11:55:44.316524Z :TEST DEBUG: GetNumRequestsInFlight# 49 InFlightWritesSize# 22 2025-07-08T11:55:44.316526Z :TEST DEBUG: sent Delete Id# 0000000000000068 NumReq# 49 2025-07-08T11:55:44.316566Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 2382 HandleWrite Lsn# 6092 DataSize# 630823 WriteQueueSize# 11 WriteInProgressItemsSize# 5 2025-07-08T11:55:44.316568Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 11 WriteInProgressItemsSize# 5 2025-07-08T11:55:44.316571Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 2383 HandleWrite Lsn# 6093 DataSize# 953725 WriteQueueSize# 12 WriteInProgressItemsSize# 5 2025-07-08T11:55:44.316572Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 12 WriteInProgressItemsSize# 5 2025-07-08T11:55:44.316580Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 2367 ApplyBlobWrite Status# OK 2025-07-08T11:55:44.316588Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] WriteQueueSize# 12 WriteInProgressItemsSize# 4 2025-07-08T11:55:44.316590Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 2372 ProcessWriteItem entry 2025-07-08T11:55:44.316721Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Writer] QueryId# 2372 ProcessWriteItem OffsetInBlocks# 834 IndexInsideChunk# 6 SizeInBlocks# 156 SizeInBytes# 1267968 Offset# 6778752 Size# 1267968 End# 8046720 Id# 0000000000000028 ChunkIdx# 57 ChunkSerNum# 1439 Defrag# false 2025-07-08T11:55:44.316729Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 6094 HandleDelete Ids# [0000000000000015] 2025-07-08T11:55:44.316751Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] LogBlobDeletes ChunkIdx# 51 ChunkSerNum# 1433 Id# 0000000000000015 IndexInsideChunk# 5 SizeInBlocks# 92 Lsn# 3720 Owner# 1 SeqNo# 6094 2025-07-08T11:55:44.316754Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 3720 Entrypoint# false Virtual# false 2025-07-08T11:55:44.316771Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 6095 
HandleDelete Ids# [0000000000000035] 2025-07-08T11:55:44.316775Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] LogBlobDeletes ChunkIdx# 41 ChunkSerNum# 1423 Id# 0000000000000035 IndexInsideChunk# 9 SizeInBlocks# 110 Lsn# 3721 Owner# 1 SeqNo# 6095 2025-07-08T11:55:44.316779Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 3721 Entrypoint# false Virtual# false 2025-07-08T11:55:44.316784Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 6096 HandleDelete Ids# [0000000000000082] 2025-07-08T11:55:44.316788Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] LogBlobDeletes ChunkIdx# 40 ChunkSerNum# 1422 Id# 0000000000000082 IndexInsideChunk# 3 SizeInBlocks# 72 Lsn# 3722 Owner# 1 SeqNo# 6096 2025-07-08T11:55:44.316789Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 3722 Entrypoint# false Virtual# false 2025-07-08T11:55:44.316793Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Deleter] Owner# 1 SeqNo# 6097 HandleDelete Ids# [0000000000000068] 2025-07-08T11:55:44.316796Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] LogBlobDeletes ChunkIdx# 48 ChunkSerNum# 1430 Id# 0000000000000068 IndexInsideChunk# 8 SizeInBlocks# 103 Lsn# 3723 Owner# 1 SeqNo# 6097 2025-07-08T11:55:44.316798Z :BS_INCRHUGE DEBUG: [PDisk# 000000001 Logger] ProcessDeleteQueueItem Lsn# 3723 Entrypoint# false Virtual# false |61.4%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_group/ydb-core-blobstorage-ut_group >> ThrottlerControlTests::MultiThreaded2Threads200Ticks30Init7Step |61.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::Simple [GOOD] |61.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.so >> ValidationTests::MapType [GOOD] |61.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest |61.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::LongIdle [GOOD] >> ThrottlerControlTests::MultiThreaded2Threads200Ticks30Init7Step [GOOD] >> ValidationTests::HasReservedPaths [GOOD] >> SamplingControlTests::EdgeCaseUpper [GOOD] >> ValidationTests::CanDispatchByTag [GOOD] |61.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest |61.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest |61.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest >> ValidationTests::MapType [GOOD] >> ValidationTests::AdvancedCopyTo [GOOD] |61.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/bulk_upsert/ydb-public-sdk-cpp-tests-integration-bulk_upsert |61.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest >> ValidationTests::CanCopyTo [GOOD] |61.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest >> ValidationTests::HasReservedPaths [GOOD] |61.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::MultiThreaded2Threads200Ticks30Init7Step [GOOD] |61.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part2/ydb-tests-fq-yt-kqp_yt_file-part2 |61.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> SamplingControlTests::EdgeCaseUpper [GOOD] |61.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.so >> SamplingControlTests::Simple [GOOD] |61.2%| [TS] 
{default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest >> ValidationTests::AdvancedCopyTo [GOOD] |61.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.so |61.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest >> ValidationTests::CanDispatchByTag [GOOD] |61.0%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/common/ut/ydb-core-blobstorage-vdisk-common-ut |61.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part3/ydb-library-yql-tests-sql-dq_file-part3 |61.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part10/ydb-library-yql-tests-sql-hybrid_file-part10 >> ErasureBrandNew::Block42_chunked [GOOD] |61.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part6/ydb-library-yql-tests-sql-hybrid_file-part6 |61.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest >> ValidationTests::CanCopyTo [GOOD] |61.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tools/ydbd_slice/bin/ydbd_slice >> ThrottlerControlTests::Overflow_2 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/actors/interconnect/ut/unittest >> Sticking::Check [GOOD] Test command err: 0: ch1=8074 mean# 8074 dev# 0 part# 0 1: ch1=8074 ch2=8074 mean# 8074 dev# 0 part# 0 2: ch1=13338 ch2=10884 mean# 12111 dev# 1227 part# 0.1013128561 3: ch1=18148 ch2=14148 mean# 16148 dev# 2000 part# 0.1238543473 4: ch1=20188 ch2=20182 mean# 20185 dev# 3 part# 0.0001486252167 5: ch1=27764 ch2=20680 mean# 24222 dev# 3542 part# 0.1462306994 6: ch1=28222 ch2=28296 mean# 28259 dev# 37 part# 0.001309317386 7: ch1=34120 ch2=30472 mean# 32296 dev# 1824 part# 0.05647758236 8: ch1=37842 ch2=34824 mean# 36333 dev# 1509 part# 0.04153249112 9: ch1=40380 ch2=40360 mean# 40370 dev# 10 part# 0.0002477086946 10: ch1=47458 ch2=41356 mean# 44407 dev# 3051 part# 0.06870538429 11: ch1=49004 ch2=47884 mean# 48444 dev# 560 part# 0.01155973908 12: ch1=53814 ch2=51148 mean# 52481 dev# 1333 part# 0.02539966845 13: ch1=58624 ch2=54412 mean# 56518 dev# 2106 part# 0.03726246506 14: ch1=60572 ch2=60538 mean# 60555 dev# 17 part# 0.0002807365205 15: ch1=68240 ch2=60944 mean# 64592 dev# 3648 part# 0.05647758236 16: ch1=68698 ch2=68560 mean# 68629 dev# 69 part# 0.001005405878 17: ch1=74596 ch2=70736 mean# 72666 dev# 1930 part# 0.0265598767 18: ch1=78318 ch2=75088 mean# 76703 dev# 1615 part# 0.02105523904 19: ch1=80764 ch2=80716 mean# 80740 dev# 24 part# 0.0002972504335 20: ch1=87934 ch2=81620 mean# 84777 dev# 3157 part# 0.03723887375 21: ch1=89480 ch2=88148 mean# 88814 dev# 666 part# 0.007498817754 22: ch1=94290 ch2=91412 mean# 92851 dev# 1439 part# 0.01549794833 23: ch1=99100 ch2=94676 mean# 96888 dev# 2212 part# 0.02283048468 24: ch1=100956 ch2=100894 mean# 100925 dev# 31 part# 0.0003071587813 25: ch1=108716 ch2=101208 mean# 104962 dev# 3754 part# 0.03576532459 26: ch1=109174 ch2=108824 mean# 108999 dev# 175 part# 0.001605519317 27: ch1=113984 ch2=112088 mean# 113036 dev# 948 part# 0.008386708659 28: ch1=119882 ch2=114264 mean# 117073 dev# 2809 part# 0.02399357666 29: ch1=121148 ch2=121072 mean# 121110 dev# 38 part# 0.0003137643465 30: ch1=128410 ch2=121884 mean# 125147 dev# 3263 part# 0.02607333775 31: ch1=121882 ch2=128412 mean# 125147 dev# 3265 part# 0.02608931896 32: ch1=126692 ch2=123602 mean# 125147 dev# 1545 part# 0.01234548171 33: ch1=126238 ch2=124056 mean# 125147 dev# 1091 part# 0.008717747928 34: ch1=123192 ch2=127102 mean# 125147 dev# 1955 part# 0.01562162896 35: 
ch1=129004 ch2=121290 mean# 125147 dev# 3857 part# 0.03081975597 36: ch1=121428 ch2=128866 mean# 125147 dev# 3719 part# 0.02971705275 37: ch1=126234 ch2=124060 mean# 125147 dev# 1087 part# 0.008685785516 38: ch1=126234 ch2=124060 mean# 125147 dev# 1087 part# 0.008685785516 39: ch1=123686 ch2=126608 mean# 125147 dev# 1461 part# 0.01167427106 40: ch1=128502 ch2=121792 mean# 125147 dev# 3355 part# 0.02680847324 41: ch1=121882 ch2=128412 mean# 125147 dev# 3265 part# 0.02608931896 42: ch1=126234 ch2=124060 mean# 125147 dev# 1087 part# 0.008685785516 43: ch1=125146 ch2=125148 mean# 125147 dev# 1 part# 7.990603051e-06 44: ch1=123096 ch2=127198 mean# 125147 dev# 2051 part# 0.01638872686 45: ch1=129092 ch2=121202 mean# 125147 dev# 3945 part# 0.03152292904 46: ch1=121424 ch2=128870 mean# 125147 dev# 3723 part# 0.02974901516 47: ch1=126230 ch2=124064 mean# 125147 dev# 1083 part# 0.008653823104 48: ch1=125142 ch2=125152 mean# 125147 dev# 5 part# 3.995301525e-05 49: ch1=123590 ch2=126704 mean# 125147 dev# 1557 part# 0.01244136895 50: ch1=128590 ch2=121704 mean# 125147 dev# 3443 part# 0.0275116463 51: ch1=121878 ch2=128416 mean# 125147 dev# 3269 part# 0.02612128137 52: ch1=126230 ch2=124064 mean# 125147 dev# 1083 part# 0.008653823104 53: ch1=125142 ch2=125152 mean# 125147 dev# 5 part# 3.995301525e-05 54: ch1=123000 ch2=127244 mean# 125122 dev# 2122 part# 0.01695944758 55: ch1=128046 ch2=122198 mean# 125122 dev# 2924 part# 0.02336919167 56: ch1=121832 ch2=128412 mean# 125122 dev# 3290 part# 0.02629433673 57: ch1=126184 ch2=124060 mean# 125122 dev# 1062 part# 0.008487715989 58: ch1=126184 ch2=124060 mean# 125122 dev# 1062 part# 0.008487715989 59: ch1=122410 ch2=127834 mean# 125122 dev# 2712 part# 0.02167484535 60: ch1=128636 ch2=121608 mean# 125122 dev# 3514 part# 0.02808458944 61: ch1=121832 ch2=128412 mean# 125122 dev# 3290 part# 0.02629433673 62: ch1=126184 ch2=124060 mean# 125122 dev# 1062 part# 0.008487715989 63: ch1=125096 ch2=125148 mean# 125122 dev# 26 part# 0.0002077971899 64: ch1=122908 ch2=127336 mean# 125122 dev# 2214 part# 0.01769472994 65: ch1=128138 ch2=122106 mean# 125122 dev# 3016 part# 0.02410447403 66: ch1=121832 ch2=128412 mean# 125122 dev# 3290 part# 0.02629433673 67: ch1=126642 ch2=123602 mean# 125122 dev# 1520 part# 0.01214814341 68: ch1=126188 ch2=124056 mean# 125122 dev# 1066 part# 0.008519684788 69: ch1=122322 ch2=127922 mean# 125122 dev# 2800 part# 0.02237815892 70: ch1=128732 ch2=121512 mean# 125122 dev# 3610 part# 0.0288518406 71: ch1=121836 ch2=128408 mean# 125122 dev# 3286 part# 0.02626236793 72: ch1=126188 ch2=124056 mean# 125122 dev# 1066 part# 0.008519684788 73: ch1=126188 ch2=124056 mean# 125122 dev# 1066 part# 0.008519684788 74: ch1=123908 ch2=126336 mean# 125122 dev# 1214 part# 0.00970253033 75: ch1=128234 ch2=122010 mean# 125122 dev# 3112 part# 0.0248717252 76: ch1=121836 ch2=128408 mean# 125122 dev# 3286 part# 0.02626236793 77: ch1=126646 ch2=123598 mean# 125122 dev# 1524 part# 0.01218011221 78: ch1=126192 ch2=124052 mean# 125122 dev# 1070 part# 0.008551653586 79: ch1=123322 ch2=126922 mean# 125122 dev# 1800 part# 0.0143859593 80: ch1=128828 ch2=121416 mean# 125122 dev# 3706 part# 0.02961909177 81: ch1=121840 ch2=128404 mean# 125122 dev# 3282 part# 0.02623039913 82: ch1=126192 ch2=124052 mean# 125122 dev# 1070 part# 0.008551653586 83: ch1=126192 ch2=124052 mean# 125122 dev# 1070 part# 0.008551653586 84: ch1=123820 ch2=126424 mean# 125122 dev# 1302 part# 0.0104058439 85: ch1=128330 ch2=121964 mean# 125147 dev# 3183 part# 0.02543408951 86: ch1=121886 ch2=128408 mean# 
125147 dev# 3261 part# 0.02605735655 87: ch1=126238 ch2=124056 mean# 125147 dev# 1091 part# 0.008717747928 88: ch1=126238 ch2=124056 mean# 125147 dev# 1091 part# 0.008717747928 89: ch1=123276 ch2=127018 mean# 125147 dev# 1871 part# 0.01495041831 90: ch1=128920 ch2=121374 mean# 125147 dev# 3773 part# 0.03014854531 91: ch1=121428 ch2=128866 mean# 125147 dev# 3719 part# 0.02971705275 92: ch1=126234 ch2=124060 mean# 125147 dev# 1087 part# 0.008685785516 93: ch1=126234 ch2=124060 mean# 125147 dev# 1087 part# 0.008685785516 94: ch1=123770 ch2=126524 mean# 125147 dev# 1377 part# 0.0110030604 95: ch1=128418 ch2=121876 mean# 125147 dev# 3271 part# 0.02613726258 96: ch1=121882 ch2=128412 mean# 125147 dev# 3265 part# 0.02608931896 97: ch1=126234 ch2=124060 mean# 125147 dev# 1087 part# 0.008685785516 98: ch1=125146 ch2=125148 mean# 125147 dev# 1 part# 7.990603051e-06 99: ch1=123180 ch2=127114 mean# 125147 dev# 1967 part# 0.0157175162 100: ch1=126832 ch2=122374 ch3=1088 mean# 83431.33333 dev# 58253.9661 part# 0.6982264788 101: ch1=119248 ch2=125606 ch3=5440 mean# 83431.33333 dev# 55209.25102 part# 0.6617328145 102: ch1=123198 ch2=119712 ch3=7384 mean# 83431.33333 dev# 53792.41415 part# 0.6447507429 103: ch1=118610 ch2=120800 ch3=10884 mean# 83431.33333 dev# 51306.5019 part# 0.6149548359 104: ch1=120786 ch2=118624 ch3=10884 mean# 83431.33333 dev# 51306.30397 part# 0.6149524636 105: ch1=119344 ch2=115714 ch3=15236 mean# 83431.33333 dev# 48244.14881 part# 0.578249764 106: ch1=112258 ch2=118624 ch3=19412 mean# 83431.33333 dev# 45343.04628 part# 0.5434774259 107: ch1=115342 ch2=114272 ch3=20680 mean# 83431.33333 dev# 44374.04347 part# 0.5318630507 108: ch1=112078 ch2=114272 ch3=23944 mean# 83431.33333 dev# 42073.43207 part# 0.504288142 109: ch1=112988 ch2=112096 ch3=25210 mean# 83431.33333 dev# 41170.31015 part# 0.4934634089 110: ch1=111048 ch2=109858 ch3=29388 mean# 83431.33333 dev# 38217.49541 part# 0.4580712531 111: ch1=107722 ch2=111008 ch3=31564 mean# 83431.33333 dev# 36700.26929 part# 0.4398859257 112: ch1=107264 ch2=107744 ch3=35286 mean# 83431.33333 dev# 34044.45566 part# 0.4080535968 113: ch1=108196 ch2=105568 ch3=36530 mean# 83431.33333 dev# 33181.6003 part# 0.3977114948 114: ch1=102298 ch2=107660 ch3=40336 mean# 83431.33333 dev# 30551.52563 part# 0.3661876708 115: ch1=106818 ch2=102108 ch3=41368 mean# 83431.33333 dev# 29805.35783 part# 0.3572441748 116: ch1=99640 ch2=104934 ch3=45720 mean# 83431.33333 dev# 26753.38116 part# 0.3206634737 117: ch1=103404 ch2=99078 ch3=47812 mean# 83431.33333 dev# 25248.5148 part# 0.3026262891 118: ch1=97914 ch2=101220 ch3=51160 mean# 83431.33333 dev# 22859.15754 part# 0.273987681 119: ch1=100090 ch2=97956 ch3=52248 mean# 83431.33333 dev# 22067.15051 part# 0.2644947603 120: ch1=98242 ch2=96082 ch3=55970 mean# 83431.33333 dev# 19438.10727 part# 0.2329832989 121: ch1=92474 ch2=99040 ch3=58780 mean# 83431.33333 dev# 17636.02875 part# 0.2113837578 122: ch1=96196 ch2=93142 ch3=60956 mean# 83431.33333 dev# 15941.29199 part# 0.1910708046 123: ch1=92478 ch2=93596 ch3=64220 mean# 83431.33333 dev# 13592.12952 part# 0.1629139674 124: ch1=91390 ch2=92508 ch3=66396 mean# 83431.33333 dev# 12054.44364 part# 0.1444834112 125: ch1=90132 ch2=90044 ch3=70118 mean# 83431.33333 dev# 9414.016831 part# 0.1128355074 126: ch1=87038 ch2=91416 ch3=71840 mean# 83431.33333 dev# 8388.920365 part# 0.100548799 127: ch1=88126 ch2=87064 ch3=75104 mean# 83431.33333 dev# 5904.253909 part# 0.07076782395 128: ch1=87330 ch2=85976 ch3=76988 mean# 83431.33333 dev# 4589.534278 part# 0.05500971991 129: 
ch1=83608 ch2=85976 ch3=80710 mean# 83431.33333 dev# 2153.461916 part# 0.02581118903 130: ch1=86820 ch2=81834 ch3=81640 mean# 83431.33333 dev# 2397.457728 part# 0.02873569955 131: ch1=81136 ch2=84254 ch3=84904 mean# 83431.33333 dev# 1644.595459 part# 0.01971196423 132: ch1=85172 ch2=82394 ch3=82728 mean# 83431.33333 dev# 1238.367025 part# 0.01484294899 133: ch1=81358 ch2=84888 ch3=84048 mean# 83431.33333 dev# 1505.641244 part# 0.01804647228 134: ch1=87034 ch2=81624 ch3=81636 mean# 83431.33333 dev# 2547.474741 part# 0.03053378915 135: ch1=78960 ch2=85346 ch3=85988 mean# 83431.33333 dev# 3172.554947 part# 0.03802594085 136: ch1=82564 ch2=83918 ch3=83812 mean# 83431.33333 dev# 614.8221062 part# 0.007369199096 137: ch1=84678 ch2=82716 ch3=82900 mean# 83431.33333 dev# 884.7211739 part# 0.01060418357 138: ch1=84858 ch2=82716 ch3=82720 mean# 83431.33333 dev# 1008.806996 part# 0.01209146439 139: ch1=83312 ch2=83174 ch3=83808 mean# 83431.33333 dev# 272.236825 part# 0.003263004607 140: ch1=80764 ch2=84812 ch3=84718 mean# 83431.33333 dev# 1886.479849 part# 0.02261116746 141: ch1=84402 ch2=83176 ch3=82716 mean# 83431.33333 dev# 711.5922678 part# 0.008529077019 142: ch1=81598 ch2=84892 ch3=83804 mean# 83431.33333 dev# 1370.345293 part# 0.01642482792 143: ch1=86408 ch2=81628 ch3=82258 mean# 83431.33333 dev# 2120.476886 part# 0.02541583361 144: ch1=80666 ch2=84892 ch3=84736 mean# 83431.33333 dev# 1956.422813 part# 0.02344949714 145: ch1=87018 ch2=81712 ch3=81564 mean# 83431.33333 dev# 2536.875944 part# 0.03040675299 146: ch1=81148 ch2=84262 ch3=84884 mean# 83431.33333 dev# 1634.406994 part# 0.01958984627 147: ch1=84236 ch2=83350 ch3=82708 mean# 83431.33333 dev# 626.4488983 part# 0.007508556717 148: ch1=81560 ch2=84854 ch3=83880 mean# 83431.33333 dev# 1381.686248 part# 0.01656075953 149: ch1=87050 ch2=81624 ch3=81620 mean# 83431.33333 dev# 2558.78426 part# 0.03066934397 150: ch1=80064 ch2=85346 ch3=84884 mean# 83431.33333 dev# 2388.522742 part# 0.02862860566 151: ch1=83478 ch2=83478 ch3=83338 mean# 83431.33333 dev# 65.99663291 part# 0.0007910293444 152: ch1=83782 ch2=82720 ch3=83792 mean# 83431.33333 dev# 503.005191 part# 0.006028972221 153: ch1=82694 ch2=83808 ch3=83792 mean# 83431.33333 dev# 521.414316 part# 0.006249622236 154: ch1=84870 ch2=82720 ch3=82704 mean# 83431.33333 dev# 1017.311926 part# 0.01219340367 155: ch1=80518 ch2=84896 ch3=84880 mean# 83431.33333 dev# 2060.048112 part# 0.02469154009 156: ch1=85328 ch2=82720 ch3=82246 mean# 83431.33333 dev# 1355.034399 part# 0.016241313 157: ch1=82698 ch2=83808 ch3=83788 mean# 83431.33333 dev# 518.6092513 part# 0.006216000999 158: ... 
4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes written 4195061 bytes 2025-07-08T11:55:35.577279Z :INTERCONNECT INFO: Proxy [1:7524678488685128302:2048] [node 2] ICP01 ready to work 2025-07-08T11:55:35.581414Z :INTERCONNECT INFO: Proxy [2:7524678488265227990:2048] [node 1] ICP01 ready to work 2025-07-08T11:55:35.583361Z :INTERCONNECT WARN: Handshake [2:7524678488265227998:2055] [node 1] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2025-07-08T11:55:35.583817Z :INTERCONNECT INFO: Proxy [2:7524678488265227990:2048] [node 1] ICP08 No active sessions, becoming PendingConnection 2025-07-08T11:55:35.583866Z :INTERCONNECT INFO: Handshake [2:7524678488265227998:2055] [node 1] ICH04 handshake succeeded 2025-07-08T11:55:35.583905Z :INTERCONNECT WARN: Handshake [1:7524678488685128311:2055] [node 2] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2025-07-08T11:55:35.583917Z :INTERCONNECT INFO: Handshake [1:7524678488685128311:2055] [node 2] ICH04 handshake succeeded 2025-07-08T11:55:35.583982Z :INTERCONNECT INFO: Proxy [1:7524678488685128302:2048] [node 2] ICP20 outgoing handshake succeeded 2025-07-08T11:55:35.583993Z :INTERCONNECT_STATUS INFO: [2] session created 2025-07-08T11:55:35.583998Z :INTERCONNECT INFO: Proxy [1:7524678488685128302:2048] [node 2] ICP22 created new session: [1:7524678488685128313:2048] 2025-07-08T11:55:35.584001Z :INTERCONNECT_SESSION INFO: Session [1:7524678488685128313:2048] [node 2] ICS09 handshake done sender: [1:7524678488685128311:2055] self: [1:7524678488685128310:0] peer: [2:7524678488265227999:0] socket: 22 2025-07-08T11:55:35.584004Z 
:INTERCONNECT_SESSION INFO: Session [1:7524678488685128313:2048] [node 2] ICS10 traffic start 2025-07-08T11:55:35.584009Z :INTERCONNECT_STATUS INFO: [2] connected 2025-07-08T11:55:35.583947Z :INTERCONNECT INFO: Proxy [2:7524678488265227990:2048] [node 1] ICP19 incoming handshake succeeded 2025-07-08T11:55:35.583962Z :INTERCONNECT_STATUS INFO: [1] session created 2025-07-08T11:55:35.583968Z :INTERCONNECT INFO: Proxy [2:7524678488265227990:2048] [node 1] ICP22 created new session: [2:7524678488265228000:2048] 2025-07-08T11:55:35.583978Z :INTERCONNECT_SESSION INFO: Session [2:7524678488265228000:2048] [node 1] ICS09 handshake done sender: [2:7524678488265227998:2055] self: [2:7524678488265227999:0] peer: [1:7524678488685128310:0] socket: 23 2025-07-08T11:55:35.583983Z :INTERCONNECT_SESSION INFO: Session [2:7524678488265228000:2048] [node 1] ICS10 traffic start 2025-07-08T11:55:35.584001Z :INTERCONNECT_STATUS INFO: [1] connected Updated MaxRTT# 0.003811s Updated MaxRTT# 0.009373s Updated MaxRTT# 0.009382s Updated MaxRTT# 0.010141s Updated MaxRTT# 0.010148s Updated MaxRTT# 0.010429s Updated MaxRTT# 0.010432s Updated MaxRTT# 0.011366s Updated MaxRTT# 0.011373s Updated MaxRTT# 0.012133s Updated MaxRTT# 0.012610s Updated MaxRTT# 0.012805s Updated MaxRTT# 0.015626s Updated MaxRTT# 0.019179s Updated MaxRTT# 0.021996s Updated MaxRTT# 0.022007s |61.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> SamplingControlTests::Simple [GOOD] |61.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part7/ydb-library-yql-tests-sql-dq_file-part7 |61.0%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/driver_lib/version/ut/ydb-core-driver_lib-version-ut >> ThrottlerControlTests::Overflow_1 [GOOD] |60.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part5/ydb-library-yql-tests-sql-hybrid_file-part5 |60.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part11/ydb-library-yql-tests-sql-dq_file-part11 |60.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part4/ydb-library-yql-tests-sql-hybrid_file-part4 |60.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/python/python3_small/libpython3_udf.so |60.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part8/ydb-library-yql-tests-sql-dq_file-part8 |60.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part0/ydb-library-yql-tests-sql-hybrid_file-part0 |60.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/config/tools/protobuf_plugin/ut/unittest |60.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution >> ThrottlerControlTests::MultiThreaded10Threads100Ticks1000Init22Step >> ThrottlerControlTests::MultiThreaded5Threads150Ticks500Init15Step |60.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part15/ydb-library-yql-tests-sql-dq_file-part15 >> ThrottlerControlTests::MultiThreaded10Threads100Ticks1000Init22Step [GOOD] |60.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part14/ydb-library-yql-tests-sql-dq_file-part14 |60.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part4/ydb-library-yql-tests-sql-dq_file-part4 |60.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part3/ydb-library-yql-tests-sql-hybrid_file-part3 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> ErasureBrandNew::Block42_chunked [GOOD] Test command err: totalSize# 502324784 period1# 1.966748s period2# 0.321413s MB/s1# 
255.408819 MB/s2# 1562.863929 factor# 6.11906799 |60.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::Overflow_2 [GOOD] |60.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part8/ydb-library-yql-tests-sql-hybrid_file-part8 |60.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::Overflow_1 [GOOD] |60.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part9/ydb-library-yql-tests-sql-hybrid_file-part9 |60.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part13/ydb-library-yql-tests-sql-dq_file-part13 |60.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part12/ydb-library-yql-tests-sql-dq_file-part12 >> ThrottlerControlTests::MultiThreaded5Threads150Ticks500Init15Step [GOOD] |60.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::MultiThreaded10Threads100Ticks1000Init22Step [GOOD] |60.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part9/ydb-library-yql-tests-sql-dq_file-part9 |60.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part5/ydb-library-yql-tests-sql-dq_file-part5 |60.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part18/ydb-library-yql-tests-sql-dq_file-part18 |60.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/jaeger_tracing/ut/unittest >> ThrottlerControlTests::MultiThreaded5Threads150Ticks500Init15Step [GOOD] |60.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part6/ydb-library-yql-tests-sql-dq_file-part6 |60.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part19/ydb-library-yql-tests-sql-dq_file-part19 |60.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part2/ydb-library-yql-tests-sql-hybrid_file-part2 |60.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tools/tstool/tstool |60.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part1/ydb-library-yql-tests-sql-dq_file-part1 |60.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part1/ydb-library-yql-tests-sql-hybrid_file-part1 |60.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part0/ydb-library-yql-tests-sql-dq_file-part0 |60.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part7/ydb-library-yql-tests-sql-hybrid_file-part7 |60.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/backpressure/ut_client/ydb-core-blobstorage-backpressure-ut_client |60.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.so |60.0%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/formats/arrow/accessor/composite/ut/ydb-core-formats-arrow-accessor-composite-ut |60.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/supp/ydb_supp |60.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |59.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |59.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part10/ydb-library-yql-tests-sql-dq_file-part10 |59.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part16/ydb-library-yql-tests-sql-dq_file-part16 |59.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/mvp/oidc_proxy/ut/ydb-mvp-oidc_proxy-ut |59.7%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump >> TPDiskRaces::DecommitWithInflightMock [GOOD] >> TPDiskRaces::KillOwnerWhileDecommitting |59.6%| [LD] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/fq/common/ydb-tests-fq-common |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |59.4%| [PK] {default-linux-x86_64, relwithdebinfo} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/{common-test_framework-udfs_deps.final.pkg.fake ... yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so} |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/local_ydb/local_ydb >> ydb-tests-functional-autoconfig::import_test [GOOD] |59.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |59.4%| [TA] $(B)/ydb/core/persqueue/codecs/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBtreeIndexTPartLarge::Group [GOOD] >> TBtreeIndexTPartLarge::History >> ydb-tests-sql::import_test [GOOD] >> ydb-tests-functional-hive::import_test [GOOD] |59.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/ut/ydb-core-formats-arrow-accessor-sub_columns-ut |59.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/ut/ydb-core-formats-arrow-accessor-sparsed-ut |59.2%| COMPACTING CACHE 25.0GiB |59.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/autoconfig/import_test >> ydb-tests-functional-autoconfig::import_test [GOOD] |59.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/io_formats/arrow/scheme/ut/ydb-core-io_formats-arrow-scheme-ut |59.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/sql/import_test >> ydb-tests-sql::import_test [GOOD] |59.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/import_test >> ydb-tests-functional-hive::import_test [GOOD] |59.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/defrag/ut/ydb-core-blobstorage-vdisk-defrag-ut |59.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/bin/ydb_configure |59.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/ydb-core-blobstorage-vdisk-hulldb-generic-ut |59.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut |59.2%| [TA] $(B)/ydb/core/jaeger_tracing/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.2%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut >> tstool::import_test [GOOD] >> ydb-tests-functional-large_serializable::import_test [GOOD] |59.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |59.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yql/providers/dq/actors/ut/ydb-library-yql-providers-dq-actors-ut |59.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/mvp/oidc_proxy/bin/mvp_oidc_proxy |59.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tools/tstool/import_test >> tstool::import_test [GOOD] |59.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yql/dq/actors/compute/ut/ydb-library-yql-dq-actors-compute-ut |59.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/large_serializable/import_test >> ydb-tests-functional-large_serializable::import_test [GOOD] |59.3%| [TA] $(B)/ydb/core/config/tools/protobuf_plugin/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/library/yql/dq/opt/ut/ydb-library-yql-dq-opt-ut >> ydb-tests-fq-restarts::import_test [GOOD] |59.3%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |59.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |59.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/ydb-tests-olap-load >> ydb-tests-functional-sqs-multinode::import_test [GOOD] |59.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/restarts/import_test >> ydb-tests-fq-restarts::import_test [GOOD] |59.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |59.3%| [TA] $(B)/ydb/core/blobstorage/incrhuge/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/multinode/import_test >> ydb-tests-functional-sqs-multinode::import_test [GOOD] |59.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/driver/nemesis |59.3%| [PR] {BAZEL_UPLOAD} $(B)/ydb/tests/stability/tool/{c79bf977cdb0ffe390211f5e3d.yasm ... ro_363ad6a7a0ee9cfe4ed6517f8f.rodata} >> test.py::test[join-join_semi_correlation_in_order_by-off-ForceBlocks] >> ydbd_slice::import_test [GOOD] >> test.py::test[column_group-hint_non_yson_fail--ForceBlocks] >> test.py::test[column_group-hint_non_yson_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_non_yson_fail--Results] [SKIPPED] >> test.py::test[column_order-insert--ForceBlocks] |59.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tools/ydbd_slice/bin/import_test >> ydbd_slice::import_test [GOOD] |59.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 >> test.py::test[blocks-pg_sort--ForceBlocks] >> test.py::test[aggregate-group_by_ru_join--Results] >> test.py::test[type_v3-append_diff_flags--ForceBlocks] >> test.py::test[select-struct_members-default.txt-Results] >> TErasureTypeTest::TestBlock42PartialRestore0 [GOOD] >> test.py::test[pg-pg_column_case--Results] >> ydb-tests-fq-mem_alloc::import_test [GOOD] |59.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock42PartialRestore0 [GOOD] |59.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/mem_alloc/import_test >> ydb-tests-fq-mem_alloc::import_test [GOOD] >> test.py::test[lineage-window_member_struct-default.txt-ForceBlocks] >> test.py::test[lineage-window_member_struct-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-window_member_struct-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-yql-6133_skip_deps--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-yql-6133_skip_deps--Results] [SKIPPED] >> test.py::test[order_by-SortByOneField--ForceBlocks] |59.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema >> test.py::test[aggregate-group_by_cube_join_count--ForceBlocks] |59.3%| [AR] {RESULT} $(B)/ydb/library/actors/core/liblibrary-actors-core.a |59.3%| [AR] {RESULT} $(B)/ydb/core/kqp/common/libcore-kqp-common.a |59.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema >> ydb-tests-stability-ydb::import_test [GOOD] |59.4%| [LD] {RESULT} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/ut/ydb-core-fq-libs-db_id_async_resolver_impl-ut |59.4%| [LD] {RESULT} $(B)/ydb/library/actors/interconnect/ut_huge_cluster/ydb-library-actors-interconnect-ut_huge_cluster |59.4%| [LD] {RESULT} 
$(B)/ydb/library/yql/dq/opt/ut/ydb-library-yql-dq-opt-ut |59.4%| [LD] {RESULT} $(B)/ydb/mvp/oidc_proxy/bin/mvp_oidc_proxy |59.4%| [LD] {RESULT} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |59.4%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceshard/public/ut/ydb-core-tx-sequenceshard-public-ut |59.4%| [LD] {RESULT} $(B)/ydb/library/yql/providers/dq/runtime/ut/ydb-library-yql-providers-dq-runtime-ut |59.4%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |59.4%| [TS] {RESULT} ydb/tests/fq/mem_alloc/import_test |59.4%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dqrun/dqrun |59.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stability/ydb/import_test >> ydb-tests-stability-ydb::import_test [GOOD] >> TErasureTypeTest::TestBlock42PartialRestore3 [GOOD] |59.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |59.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |59.4%| [LD] {RESULT} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |59.4%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut_util/ydb-core-tablet_flat-ut_util >> ydb-tests-functional-scheme_tests::import_test [GOOD] |59.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock42PartialRestore3 [GOOD] |59.4%| [LD] {RESULT} $(B)/ydb/core/config/validation/ut/ydb-core-config-validation-ut |59.4%| [LD] {RESULT} $(B)/ydb/library/yql/providers/s3/object_listers/ut/ydb-library-yql-providers-s3-object_listers-ut >> ydb-tests-functional-serverless::import_test [GOOD] >> test.py::test[blocks-combine_all_pg_filter--ForceBlocks] |59.4%| [LD] {RESULT} $(B)/ydb/library/yql/providers/dq/scheduler/ut/ydb-library-yql-providers-dq-scheduler-ut >> test.py::test[join-split_to_list_as_key--Results] >> SharedThreads::RegistrationAndPassingAwayActorsTail [GOOD] >> SharedThreads::RegistrationAndPassingAwayActorsStrictPool >> test.py::test[column_group-hint_anon-perusage-ForceBlocks] |59.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/import_test >> ydb-tests-functional-serverless::import_test [GOOD] >> test.py::test[column_group-hint_anon-perusage-ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_anon-perusage-Results] [SKIPPED] >> test.py::test[column_group-hint_anon-single-ForceBlocks] [SKIPPED] >> ydb-tests-functional-script_execution::import_test [GOOD] |59.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/ydb-core-blobstorage-vdisk-hulldb-barriers-ut >> test.py::test[column_group-hint_anon-single-Results] [SKIPPED] >> test.py::test[column_order-select_plain_nosimple-default.txt-ForceBlocks] |59.5%| [LD] {RESULT} $(B)/ydb/mvp/core/ut/ydb-mvp-core-ut |59.5%| [LD] {RESULT} $(B)/ydb/library/workload/benchmark_base/ut/ydb-library-workload-benchmark_base-ut |59.5%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |59.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_tests/import_test >> ydb-tests-functional-scheme_tests::import_test [GOOD] |59.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/query/ut/ydb-core-blobstorage-vdisk-query-ut |59.5%| [LD] {RESULT} $(B)/ydb/library/actors/interconnect/ut/ydb-library-actors-interconnect-ut |59.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/script_execution/import_test >> 
ydb-tests-functional-script_execution::import_test [GOOD] |59.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |59.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |59.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index >> test.py::test[blocks-finalize_hashed_keys--ForceBlocks] |59.5%| [LD] {RESULT} $(B)/ydb/library/yql/dq/actors/spilling/ut/ydb-library-yql-dq-actors-spilling-ut |59.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |59.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/backpressure/ut_client/ydb-core-blobstorage-backpressure-ut_client |59.5%| [LD] {RESULT} $(B)/ydb/library/yql/providers/common/http_gateway/ut/ydb-library-yql-providers-common-http_gateway-ut |59.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_group/ydb-core-blobstorage-ut_group |59.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/defrag/ut/ydb-core-blobstorage-vdisk-defrag-ut |59.5%| [LD] {RESULT} $(B)/ydb/mvp/meta/ut/ydb-mvp-meta-ut |59.5%| [TS] {RESULT} ydb/tests/functional/hive/import_test |59.5%| [TS] {RESULT} ydb/tests/functional/scheme_tests/import_test |59.5%| [TS] {RESULT} ydb/tests/sql/import_test |59.5%| [LD] {RESULT} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |59.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |59.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/ydb-core-blobstorage-vdisk-hulldb-generic-ut |59.6%| [TS] {RESULT} ydb/tests/functional/autoconfig/import_test |59.6%| [TS] {RESULT} ydb/tools/cfg/bin/flake8 |59.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.6%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part7/flake8 |59.6%| [TS] {RESULT} ydb/tests/functional/script_execution/import_test >> TErasureTypeTest::TestBlock42PartialRestore1 [GOOD] |59.6%| [LD] {RESULT} $(B)/ydb/library/actors/core/ut/ydb-library-actors-core-ut |59.6%| [LD] {RESULT} $(B)/ydb/core/external_sources/object_storage/inference/ut/external_sources-object_storage-inference-ut |59.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock42PartialRestore1 [GOOD] |59.6%| [LD] {RESULT} $(B)/ydb/library/actors/dnsresolver/ut/ydb-library-actors-dnsresolver-ut |59.6%| [LD] {RESULT} $(B)/ydb/library/workload/tpch/ut/ydb-library-workload-tpch-ut |59.6%| [LD] {RESULT} $(B)/ydb/core/driver_lib/version/ut/ydb-core-driver_lib-version-ut |59.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/common/ut/ydb-core-blobstorage-vdisk-common-ut |59.6%| [TS] {RESULT} ydb/tests/functional/clickbench/import_test |59.6%| [LD] {RESULT} $(B)/ydb/core/io_formats/arrow/scheme/ut/ydb-core-io_formats-arrow-scheme-ut |59.6%| [TS] {RESULT} ydb/tests/fq/multi_plane/import_test >> ydb-tests-sql-large::import_test [GOOD] >> ydb-tests-functional-encryption::import_test [GOOD] |59.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |59.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |59.6%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part0/py2_flake8 |59.6%| [LD] {RESULT} $(B)/ydb/core/formats/arrow/accessor/sub_columns/ut/ydb-core-formats-arrow-accessor-sub_columns-ut |59.7%| [TS] {RESULT} ydb/tests/functional/scheme_shard/flake8 |59.7%| [TS] {RESULT} ydb/tests/stress/oltp_workload/tests/import_test >> TErasureTypeTest::TestBlock42PartialRestore2 [GOOD] |59.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |59.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/encryption/import_test >> ydb-tests-functional-encryption::import_test [GOOD] |59.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/sql/large/import_test >> ydb-tests-sql-large::import_test [GOOD] |59.7%| [LD] {RESULT} $(B)/ydb/library/actors/cppcoro/ut/ydb-library-actors-cppcoro-ut |59.7%| [LD] {RESULT} $(B)/ydb/mvp/oidc_proxy/ut/ydb-mvp-oidc_proxy-ut >> test.py::test[blocks-pg_sort--ForceBlocks] [GOOD] >> test.py::test[blocks-pg_sort--Results] |59.7%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part11/py2_flake8 |59.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |59.7%| [LD] {RESULT} $(B)/ydb/library/actors/helpers/ut/ydb-library-actors-helpers-ut |59.7%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8 |59.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestBlock42PartialRestore2 [GOOD] >> test.py::test[type_v3-append_diff_flags--ForceBlocks] [GOOD] >> test.py::test[type_v3-append_diff_flags--Results] >> ydb-tests-stress-log-tests::import_test [GOOD] |59.7%| [LD] {RESULT} $(B)/ydb/library/actors/http/ut/ydb-library-actors-http-ut >> test.py::test[join-mergejoin_any_no_join_reduce-off-ForceBlocks] |59.7%| [LD] {RESULT} $(B)/ydb/library/yql/providers/s3/provider/ut/ydb-library-yql-providers-s3-provider-ut |59.7%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part3/py2_flake8 >> 
ydb-tests-functional-limits::import_test [GOOD] >> test.py::test[join-join_semi_correlation_in_order_by-off-ForceBlocks] [GOOD] >> test.py::test[join-join_semi_correlation_in_order_by-off-Results] [SKIPPED] >> test.py::test[join-left_cast_to_string--ForceBlocks] >> ydb-tests-fq-common::import_test [GOOD] |59.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/log/tests/import_test >> ydb-tests-stress-log-tests::import_test [GOOD] |59.7%| [TS] {RESULT} ydb/tests/sql/large/import_test |59.7%| [TS] {RESULT} ydb/tests/functional/encryption/import_test |59.7%| [LD] {RESULT} $(B)/ydb/library/yaml_config/tools/dump_ds_init/yaml-to-proto-dump-ds-init |59.7%| [LD] {RESULT} $(B)/ydb/library/yql/providers/yt/actors/ut/ydb-library-yql-providers-yt-actors-ut |59.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/repl/ut/ydb-core-blobstorage-vdisk-repl-ut |59.8%| [LD] {RESULT} $(B)/ydb/library/yql/dq/actors/compute/ut/ydb-library-yql-dq-actors-compute-ut >> test.py::test[order_by-SortByOneField--ForceBlocks] [GOOD] >> test.py::test[order_by-SortByOneField--Results] >> test.py::test[select-struct_members-default.txt-Results] [GOOD] >> test.py::test[pg-pg_column_case--Results] [GOOD] >> test.py::test[select-substring_v1-default.txt-Results] >> test.py::test[order_by-assume_cut_prefix--ForceBlocks] |59.8%| [TS] {RESULT} ydb/tests/functional/sqs/messaging/flake8 |59.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/limits/import_test >> ydb-tests-functional-limits::import_test [GOOD] >> test.py::test[pg-select_columnref2-default.txt-Results] |59.8%| [TS] {RESULT} ydb/library/yql/tests/sql/solomon/py2_flake8 |59.8%| [TS] {RESULT} ydb/tests/functional/rename/flake8 |59.8%| [TS] {RESULT} ydb/tests/stress/log/tests/import_test >> test.py::test[schema-read_schema_change_other--ForceBlocks] |59.8%| [TS] {RESULT} ydb/library/workload/benchmark_base/ut/unittest |59.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/common/import_test >> ydb-tests-fq-common::import_test [GOOD] |59.8%| [LD] {RESULT} $(B)/ydb/library/actors/core/harmonizer/ut/ydb-library-actors-core-harmonizer-ut |59.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut >> test.py::test[union_all-union_all_multiple-default.txt-Results] |59.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut >> ydb-tests-functional-audit::import_test [GOOD] |59.8%| [LD] {RESULT} $(B)/ydb/core/config/init/ut/ydb-core-config-init-ut >> test.py::test[limit-dynamic_limit_offset_overflow-default.txt-Results] |59.8%| [TS] {RESULT} ydb/tools/ydbd_slice/bin/import_test |59.8%| [TS] {RESULT} ydb/tests/fq/http_api/flake8 |59.8%| [TS] {RESULT} ydb/tests/functional/limits/import_test |59.8%| [LD] {RESULT} $(B)/ydb/core/formats/arrow/accessor/composite/ut/ydb-core-formats-arrow-accessor-composite-ut >> TPDiskRaces::KillOwnerWhileDecommitting [GOOD] >> TPDiskRaces::KillOwnerWhileDecommittingWithInflight >> test.py::test[type_v3-append_diff_flags--Results] [GOOD] >> test.py::test[union_all-union_all_subexpr-default.txt-ForceBlocks] |59.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/audit/import_test >> ydb-tests-functional-audit::import_test [GOOD] >> test.py::test[lineage-isolated-default.txt-ForceBlocks] >> TColumnShardTestSchema::RebootHotTiersTtl >> test.py::test[blocks-pg_sort--Results] [GOOD] >> test.py::test[lineage-isolated-default.txt-ForceBlocks] [SKIPPED] >> 
test.py::test[blocks-pg_top_sort--ForceBlocks] >> test.py::test[lineage-isolated-default.txt-Results] >> test.py::test[lineage-isolated-default.txt-Results] [SKIPPED] >> test.py::test[lineage-reduce_all_row-default.txt-ForceBlocks] >> test.py::test[lineage-reduce_all_row-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-reduce_all_row-default.txt-Results] >> test.py::test[lineage-reduce_all_row-default.txt-Results] [SKIPPED] >> test.py::test[order_by-SortByOneField--Results] [GOOD] >> test.py::test[order_by-order_by_tuple_and_member-default.txt-ForceBlocks] >> test.py::test[action-action_eval_cluster_use_compact_named_exprs--Results] >> test.py::test[optimizers-passthrough_sortness_over_map-default.txt-ForceBlocks] >> test.py::test[hor_join-fuse_multi_outs2--Results] >> TestProgram::JsonValue >> test.py::test[action-action_eval_cluster_use_compact_named_exprs--Results] [SKIPPED] >> test.py::test[hor_join-fuse_multi_outs2--Results] [SKIPPED] >> test.py::test[action-nested_subquery--Results] >> test.py::test[hor_join-merge_multiouts_part--ForceBlocks] [SKIPPED] >> test.py::test[hor_join-merge_multiouts_part--Results] >> test.py::test[hor_join-merge_multiouts_part--Results] [SKIPPED] >> test.py::test[hor_join-sorted_out_mix--ForceBlocks] >> test.py::test[hor_join-sorted_out_mix--ForceBlocks] [SKIPPED] >> test.py::test[hor_join-sorted_out_mix--Results] [SKIPPED] >> test.py::test[hor_join-yield_on-default.txt-ForceBlocks] |59.8%| [TS] {RESULT} ydb/tests/functional/sqs/multinode/import_test >> TestProgram::JsonValue [GOOD] |59.8%| [LD] {RESULT} $(B)/ydb/core/metering/ut/ydb-core-metering-ut |59.8%| [LD] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |59.8%| [LD] {RESULT} $(B)/ydb/library/yql/providers/dq/provider/ut/ydb-library-yql-providers-dq-provider-ut |59.9%| [LD] {RESULT} $(B)/ydb/core/tx/long_tx_service/public/ut/ydb-core-tx-long_tx_service-public-ut >> test.py::test[aggregate-group_by_ru_join--Results] [GOOD] >> test.py::test[aggregate-group_by_session_star--ForceBlocks] >> test.py::test[schema-insert_sorted-read_schema-Results] |59.9%| [LD] {RESULT} $(B)/ydb/core/external_sources/ut/ydb-core-external_sources-ut |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |59.9%| [TS] {RESULT} ydb/tests/fq/common/import_test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValue [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\"\000\t\211\004?\020\235?\002\001\235?\004\000\"\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\"\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D 
VisitAll\000\t\211\020?H\211\006?H\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?p6Json2.SqlValueConvertToUtf8\202\003?r\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?d\t\211\014?b\311\002?b\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\214\005\205\004\203\010\203\005@\032\036\003?\222\002\003?\224\000\003\001\003?\216\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\244\203\005@\200\203\005@\202\022\000\003?\260\026Json2.Parse\202\003?\262\000\002\017\003?\246\000\003?\250\000\003?\252\000\003?\254\000?:\036\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\312\203\005@\200\203\005@\202\022\000\003?\326\"Json2.CompilePath\202\003?\330\000\002\017\003?\314\000\003?\316\000\003?\320\000\003?\322\000?2\036\010\000?l\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\370\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:96;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\"\000\t\211\004?\020\235?\002\001\235?\004\000\"\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\"\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D VisitAll\000\t\211\020?H\211\006?H\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?p6Json2.SqlValueConvertToUtf8\202\003?r\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?d\t\211\014?b\311\002?b\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\214\005\205\004\203\010\203\005@\032\036\003?\222\002\003?\224\000\003\001\003?\216\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\244\203\005@\200\203\005@\202\022\000\003?\260\026Json2.Parse\202\003?\262\000\002\017\003?\246\000\003?\250\000\003?\252\000\003?\254\000?:\036\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\312\203\005@\200\203\005@\202\022\000\003?\326\"Json2.CompilePath\202\003?\330\000\002\017\003?\314\000\003?\316\000\003?\320\000\003?\322\000?2\036\010\000?l\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\370\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"5,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, 
label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:44;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"5,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] Check output for Utf8 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r$Json2.SqlValueBool\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:96;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r$Json2.SqlValueBool\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"5,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\ ... 
} FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203B\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?6 VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\374\016Convert?\372\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"5,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:44;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"5,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] Check output for Float FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:96;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"5,15\",\"p\":{\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:44;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"5,15","p":{"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] Check output for Double FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; |59.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/incrhuge/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[blocks-finalize_hashed_keys--ForceBlocks] [GOOD] >> test.py::test[blocks-finalize_hashed_keys--Results] >> test.py::test[join-mergejoin_force_per_link-off-ForceBlocks] |59.9%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part2/py2_flake8 |59.9%| [TS] {RESULT} ydb/tests/fq/restarts/import_test |59.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest |59.9%| [TS] {RESULT} ydb/tests/stability/ydb/import_test >> TCdcStreamTests::VirtualTimestamps |59.9%| [TA] {RESULT} $(B)/ydb/core/config/tools/protobuf_plugin/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.9%| [TS] {RESULT} ydb/tools/tstool/import_test |59.9%| [TA] {RESULT} $(B)/ydb/core/jaeger_tracing/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.9%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a >> test.py::test[blocks-combine_all_pg_filter--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_pg_filter--Results] |59.9%| [AR] {RESULT} $(B)/ydb/tests/stability/tool/libpy3tests-stability-tool.a >> test.py::test[schema-read_schema_change_other--ForceBlocks] [GOOD] >> test.py::test[schema-read_schema_change_other--Results] [GOOD] >> test.py::test[schema-select_all-yamred_dsv-ForceBlocks] |59.9%| [TS] {RESULT} ydb/tests/functional/large_serializable/import_test |59.9%| [TS] {RESULT} ydb/tests/functional/serverless/import_test >> test.py::test[select-where_in-default.txt-Results] |59.9%| [TS] {RESULT} ydb/tests/functional/canonical/import_test |59.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/huge/ut/ydb-core-blobstorage-vdisk-huge-ut |59.9%| [TS] {RESULT} ydb/tests/functional/audit/import_test |60.0%| [TS] {RESULT} ydb/mvp/meta/ut/unittest |60.0%| [TS] {RESULT} ydb/tests/stability/tool/flake8 |60.0%| [TS] {RESULT} ydb/tests/olap/scenario/flake8 |60.0%| [LD] {RESULT} $(B)/ydb/library/actors/cppcoro/corobenchmark/corobenchmark |60.0%| [TS] {RESULT} ydb/library/benchmarks/runner/result_convert/flake8 |60.0%| [TS] {RESULT} ydb/tests/functional/sqs/with_quotas/import_test |60.0%| [TS] {RESULT} ydb/tests/functional/blobstorage/import_test |60.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part5/flake8 |60.0%| [TS] {RESULT} ydb/tests/functional/serverless/flake8 |60.0%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part1/py2_flake8 |60.0%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/flake8 |60.0%| [TS] {RESULT} ydb/tests/fq/generic/analytics/black |60.0%| [TS] {RESULT} ydb/library/yql/providers/dq/scheduler/ut/unittest >> TCdcStreamTests::VirtualTimestamps [GOOD] >> TCdcStreamTests::ResolvedTimestamps |60.0%| [LD] {RESULT} $(B)/ydb/core/blob_depot/ut/ydb-core-blob_depot-ut |60.0%| [TS] {RESULT} 
ydb/tests/fq/yds/flake8 |60.0%| [LD] {RESULT} $(B)/ydb/library/yaml_config/ut/ydb-library-yaml_config-ut |60.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/ydb-core-blobstorage-vdisk-hulldb-fresh-ut |60.0%| [TS] {RESULT} ydb/tests/sql/lib/flake8 |60.0%| [TS] {RESULT} ydb/tests/functional/serializable/import_test |60.0%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut_large/ydb-core-tablet_flat-ut_large |60.1%| [LD] {RESULT} $(B)/ydb/core/base/ut_auth/ydb-core-base-ut_auth |60.1%| [TS] {RESULT} ydb/tests/functional/scheme_shard/import_test |60.1%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/provider/ut/pushdown/yql-providers-generic-provider-ut-pushdown |60.1%| [TS] {RESULT} ydb/library/benchmarks/runner/runner/import_test |60.1%| [TS] {RESULT} ydb/tests/fq/s3/flake8 |60.1%| [TS] {RESULT} ydb/library/yql/providers/dq/provider/ut/unittest >> test.py::test[column_order-insert--ForceBlocks] [GOOD] >> test.py::test[column_order-insert--Results] |60.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/pdisk/ut/ydb-core-blobstorage-pdisk-ut >> TVectorIndexTests::VectorKmeansTreeImplTable [GOOD] >> test.py::test[column_order-select_plain_nosimple-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_plain_nosimple-default.txt-Results] |60.1%| [TS] {RESULT} ydb/tests/functional/tpc/medium/import_test |60.1%| [TS] {RESULT} ydb/core/kqp/ut/federated_query/common/clang_format |60.1%| [TS] {RESULT} ydb/library/yaml_config/tools/simple_json_diff/import_test |60.1%| [TA] {RESULT} $(B)/ydb/core/persqueue/codecs/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TCdcStreamTests::ResolvedTimestamps [GOOD] >> TCdcStreamTests::RetentionPeriod |60.1%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb >> TVectorIndexTests::CreateTablePrefixCovering |60.1%| [TS] {RESULT} ydb/library/benchmarks/runner/run_tests/flake8 >> TCdcStreamTests::Basic |60.1%| [LD] {RESULT} $(B)/ydb/library/formats/arrow/ut/ydb-library-formats-arrow-ut |60.1%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_double_indexed/ydb-core-tx-scheme_board-ut_double_indexed >> test.py::test[join-split_to_list_as_key--Results] [GOOD] >> test.py::test[join-star_join_inners--Results] >> TBtreeIndexTPartLarge::History [GOOD] >> TFlatTableLongTxLarge::LargeDeltaChain >> test.py::test[select-substring_v1-default.txt-Results] [GOOD] >> test.py::test[select-table_content_from_sort_desc-default.txt-Results] >> test.py::test[join-left_cast_to_string--ForceBlocks] [GOOD] |60.1%| [LD] {RESULT} $(B)/ydb/library/yaml_config/tools/dump/yaml-to-proto-dump >> TCdcStreamTests::RetentionPeriod [GOOD] >> TCdcStreamTests::TopicPartitions >> test.py::test[join-left_cast_to_string--Results] >> test.py::test[join-mergejoin_any_no_join_reduce-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_any_no_join_reduce-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_narrows_output_sort--ForceBlocks] |60.1%| [LD] {RESULT} $(B)/ydb/core/config/validation/column_shard_config_validator_ut/column_shard_config_validator_ut |60.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/incrhuge/ut/ydb-core-blobstorage-incrhuge-ut |60.1%| [TS] {RESULT} ydb/core/fq/libs/test_connection/ut/unittest >> TVectorIndexTests::CreateTablePrefixCovering [GOOD] |60.1%| [TS] {RESULT} ydb/tests/stress/olap_workload/tests/import_test |60.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::VectorKmeansTreeImplTable [GOOD] |60.2%| [LD] {RESULT} 
$(B)/ydb/core/config/validation/auth_config_validator_ut/core-config-validation-auth_config_validator_ut |60.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/base/ut/ydb-core-blobstorage-base-ut |60.2%| [TS] {RESULT} ydb/tests/olap/docs/generator/flake8 |60.2%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part4/py2_flake8 |60.2%| [LD] {RESULT} $(B)/ydb/tests/stability/tool/tool |60.2%| [LD] {RESULT} $(B)/ydb/library/yql/providers/s3/common/ut/ydb-library-yql-providers-s3-common-ut >> test.py::test[aggregate-group_by_cube_join_count--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_cube_join_count--Results] |60.2%| [TS] {RESULT} ydb/tests/stress/log/tests/flake8 |60.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |60.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTablePrefixCovering [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:56:19.252584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:56:19.252607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:56:19.252611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:56:19.252616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:56:19.252621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:56:19.252630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:56:19.252646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:56:19.252658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:56:19.252722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:56:19.264786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:56:19.264806Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:56:19.272314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:56:19.272367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:56:19.272395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:56:19.273653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:56:19.273697Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:56:19.273789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:56:19.273937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:56:19.274663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:56:19.274706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:56:19.274896Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:56:19.274905Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:56:19.274920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:56:19.274926Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:56:19.274932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:56:19.274954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:56:19.276022Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:56:19.293289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:56:19.293355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:56:19.293411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:56:19.293454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:56:19.293463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:56:19.294135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:56:19.294160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:56:19.294200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:56:19.294208Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:56:19.294213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next 
state 2025-07-08T11:56:19.294218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:56:19.294667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:56:19.294677Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:56:19.294682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:56:19.295106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:56:19.295115Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:56:19.295120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:56:19.295126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:56:19.295638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:56:19.295959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:56:19.295992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:56:19.296150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:56:19.296169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:56:19.296179Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:56:19.296248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:56:19.296254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:56:19.296279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:56:19.296288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:56:19.296624Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:56:19.296631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-07-08T11:56:19.296663Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:56:19.296669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:56:19.296678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:56:19.296683Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:56:19.296694Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:56:19.296698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:56:19.296702Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:56:19.296705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:56:19.296708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:56:19.296713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:56:19.296717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:56:19.296720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:56:19.296730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:56:19.296735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:56:19.296738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:56:19.297095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:56:19.297111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
ration: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T11:56:19.514172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T11:56:19.514175Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-07-08T11:56:19.514191Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T11:56:19.514197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T11:56:19.514200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-07-08T11:56:19.514204Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-07-08T11:56:19.514207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-07-08T11:56:19.514243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T11:56:19.514250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T11:56:19.514253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-07-08T11:56:19.514257Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-07-08T11:56:19.514260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-07-08T11:56:19.514291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T11:56:19.514297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T11:56:19.514301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-07-08T11:56:19.514304Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 18446744073709551615 2025-07-08T11:56:19.514308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, 
LocalPathId: 6] was 4 2025-07-08T11:56:19.514317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/5, is published: true 2025-07-08T11:56:19.514991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:3, at schemeshard: 72057594046678944 2025-07-08T11:56:19.515002Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:3 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:56:19.515051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-07-08T11:56:19.515072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:3 progress is 2/5 2025-07-08T11:56:19.515076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/5 2025-07-08T11:56:19.515079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:3 progress is 2/5 2025-07-08T11:56:19.515082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/5 2025-07-08T11:56:19.515086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/5, is published: true 2025-07-08T11:56:19.515240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:4, at schemeshard: 72057594046678944 2025-07-08T11:56:19.515246Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:4 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:56:19.515271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-07-08T11:56:19.515282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:4 progress is 3/5 2025-07-08T11:56:19.515285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/5 2025-07-08T11:56:19.515289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:4 progress is 3/5 2025-07-08T11:56:19.515292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/5 2025-07-08T11:56:19.515295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/5, is published: true 2025-07-08T11:56:19.515503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2025-07-08T11:56:19.515511Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:56:19.515537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-07-08T11:56:19.515551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 4/5 2025-07-08T11:56:19.515554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/5 2025-07-08T11:56:19.515557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 4/5 2025-07-08T11:56:19.515560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/5 2025-07-08T11:56:19.515564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/5, is published: true 2025-07-08T11:56:19.515615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-07-08T11:56:19.515632Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-07-08T11:56:19.515636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-07-08T11:56:19.515640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-07-08T11:56:19.515653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-07-08T11:56:19.515657Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:56:19.515688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-07-08T11:56:19.515699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 5/5 2025-07-08T11:56:19.515702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 5/5 2025-07-08T11:56:19.515706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 5/5 2025-07-08T11:56:19.515709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 5/5 2025-07-08T11:56:19.515712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 5/5, is published: true 2025-07-08T11:56:19.515724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:456:2402] message: TxId: 102 2025-07-08T11:56:19.515728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 5/5 2025-07-08T11:56:19.515734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-07-08T11:56:19.515738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-07-08T11:56:19.515752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-07-08T11:56:19.515756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-07-08T11:56:19.515759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-07-08T11:56:19.515764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-07-08T11:56:19.515767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:2 2025-07-08T11:56:19.515770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:2 2025-07-08T11:56:19.515776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-07-08T11:56:19.515780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:3 2025-07-08T11:56:19.515783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:3 2025-07-08T11:56:19.515788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-07-08T11:56:19.515792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:4 2025-07-08T11:56:19.515794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:4 2025-07-08T11:56:19.515800Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-07-08T11:56:19.515841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-07-08T11:56:19.515846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-07-08T11:56:19.515851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-07-08T11:56:19.515934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-07-08T11:56:19.515946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-07-08T11:56:19.515958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-07-08T11:56:19.516404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-07-08T11:56:19.516413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:603:2542] TestWaitNotification: OK eventTxId 102 |60.2%| [TS] {RESULT} ydb/tests/tools/ydb_serializable/import_test |60.2%| [TS] {RESULT} ydb/library/yaml_config/validator/ut/validator_builder/unittest |60.2%| [TS] {RESULT} ydb/tests/library/clients/flake8 >> TCdcStreamTests::Basic [GOOD] >> TCdcStreamTests::Attributes |60.2%| [TS] {RESULT} ydb/mvp/core/ut/unittest |60.2%| [TS] {RESULT} ydb/library/yql/providers/dq/actors/ut/unittest |60.2%| [TS] {RESULT} ydb/tests/stress/transfer/tests/import_test |60.2%| [TS] {RESULT} ydb/library/actors/dnsresolver/ut/unittest |60.2%| [TS] {RESULT} ydb/tests/functional/limits/flake8 |60.2%| [TM] {RESULT} ydb/core/blob_depot/ut/unittest |60.2%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |60.2%| [TS] {RESULT} ydb/tests/fq/common/flake8 |60.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/ydb-core-blobstorage-vdisk-hulldb-compstrat-ut |60.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |60.3%| [TS] {RESULT} ydb/tests/postgres_integrations/library/ut/flake8 |60.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |60.3%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part19/py2_flake8 >> TCdcStreamTests::Attributes [GOOD] >> TCdcStreamTests::DocApi >> test.py::test[union_all-union_all_subexpr-default.txt-ForceBlocks] [GOOD] >> test.py::test[union_all-union_all_subexpr-default.txt-Results] >> TCdcStreamTests::TopicPartitions [GOOD] >> TCdcStreamTests::ReplicationAttribute |60.3%| [TS] {RESULT} ydb/tests/sql/flake8 |60.3%| [TS] {RESULT} ydb/tests/functional/sqs/with_quotas/flake8 |60.3%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part6/py2_flake8 |60.3%| [TS] {RESULT} ydb/tests/stability/ydb/flake8 |60.3%| [TS] {RESULT} ydb/library/yaml_config/ut_transform/flake8 |60.3%| [TS] {RESULT} ydb/tests/tools/nemesis/ut/import_test |60.3%| [TS] {RESULT} ydb/library/actors/cppcoro/ut/unittest |60.3%| [TS] {RESULT} ydb/library/yql/dq/actors/compute/ut/unittest >> test.py::test[union_all-union_all_multiple-default.txt-Results] [GOOD] >> test.py::test[view-all_from_view--ForceBlocks] |60.3%| [TS] {RESULT} 
ydb/tests/library/wardens/py2_flake8 |60.3%| [TS] {RESULT} ydb/tests/functional/query_cache/import_test |60.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk |60.3%| [TS] {RESULT} ydb/tests/tools/nemesis/ut/flake8 |60.3%| [TS] {RESULT} ydb/tests/functional/config/import_test |60.3%| [TS] {RESULT} ydb/tests/functional/config/flake8 >> TCdcStreamTests::DocApi [GOOD] >> TCdcStreamTests::DocApiNegative |60.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest |60.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/engines/ut/unittest >> test.py::test[order_by-assume_cut_prefix--ForceBlocks] [GOOD] >> test.py::test[order_by-assume_cut_prefix--Results] >> test.py::test[schema-select_all-yamred_dsv-ForceBlocks] [GOOD] >> test.py::test[schema-select_all-yamred_dsv-Results] |60.4%| [TS] {RESULT} ydb/tests/functional/sqs/cloud/import_test |60.4%| [LD] {RESULT} $(B)/ydb/core/log_backend/ut/ydb-core-log_backend-ut >> TCdcStreamTests::DocApiNegative [GOOD] |60.4%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part16/py2_flake8 |60.4%| [TS] {RESULT} ydb/core/fq/libs/http_api_client/flake8 |60.4%| [TS] {RESULT} ydb/tests/functional/clickbench/flake8 >> TCdcStreamTests::Negative >> TCdcStreamTests::ReplicationAttribute [GOOD] >> TCdcStreamTests::RebootSchemeShard |60.4%| [TS] {RESULT} ydb/core/resource_pools/ut/unittest |60.4%| [TS] {RESULT} ydb/library/yql/providers/yt/actors/ut/unittest |60.4%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part3/flake8 |60.4%| [TS] {RESULT} ydb/tests/functional/hive/flake8 >> test.py::test[blocks-pg_top_sort--ForceBlocks] [GOOD] >> test.py::test[blocks-pg_top_sort--Results] |60.4%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part1/flake8 |60.4%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part18/flake8 |60.4%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part3/py2_flake8 |60.4%| [TS] {RESULT} ydb/core/viewer/tests/flake8 |60.4%| [TS] {RESULT} ydb/tests/example/flake8 |60.4%| [TS] {RESULT} ydb/tests/functional/wardens/flake8 |60.4%| [LD] {RESULT} $(B)/ydb/core/formats/arrow/ut/ydb-core-formats-arrow-ut |60.4%| [TS] {RESULT} ydb/tests/functional/sqs/messaging/import_test |60.4%| [LD] {RESULT} $(B)/ydb/library/yql/providers/dq/actors/ut/ydb-library-yql-providers-dq-actors-ut >> test.py::test[order_by-order_by_tuple_and_member-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_tuple_and_member-default.txt-Results] >> test.py::test[pg-select_columnref2-default.txt-Results] [GOOD] >> test.py::test[pg-select_qstarref1-default.txt-Results] >> TCdcStreamTests::Negative [GOOD] >> TCdcStreamTests::DisableProtoSourceIdInfo |60.4%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part0/flake8 |60.4%| [TS] {RESULT} ydb/library/benchmarks/runner/flake8 |60.5%| [TA] {RESULT} $(B)/ydb/library/actors/helpers/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test.py::test[action-nested_subquery--Results] [GOOD] >> test.py::test[aggr_factory-avg_if-default.txt-Results] >> test.py::test[limit-dynamic_limit_offset_overflow-default.txt-Results] [GOOD] >> test.py::test[limit-insert_with_limit--ForceBlocks] |60.5%| [TS] {RESULT} ydb/core/erasure/ut_perf/unittest |60.5%| [TS] {RESULT} ydb/tests/tools/ydb_serializable/replay/import_test |60.5%| [TM] {RESULT} ydb/library/actors/interconnect/ut_huge_cluster/unittest >> test.py::test[blocks-finalize_hashed_keys--Results] [GOOD] >> test.py::test[blocks-pg_call--ForceBlocks] |60.5%| [TS] {RESULT} ydb/core/io_formats/arrow/scheme/ut/unittest >> TCdcStreamTests::RebootSchemeShard [GOOD] >> TCdcStreamTests::StreamOnIndexTableNegative >> test.py::test[blocks-combine_all_pg_filter--Results] [GOOD] >> test.py::test[blocks-date_equals--ForceBlocks] >> TCdcStreamTests::DisableProtoSourceIdInfo [GOOD] >> TCdcStreamTests::CreateStream |60.5%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part14/flake8 |60.5%| [TS] {RESULT} ydb/public/tools/ydb_recipe/import_test |60.5%| [TS] {RESULT} ydb/tests/tools/ydb_serializable/replay/flake8 >> test.py::test[column_order-insert--Results] [GOOD] |60.5%| [TM] {RESULT} ydb/core/blobstorage/ut_group/unittest >> test.py::test[column_order-insert_with_new_cols--ForceBlocks] >> test.py::test[column_order-select_plain_nosimple-default.txt-Results] [GOOD] >> test.py::test[column_order-select_subquery-default.txt-ForceBlocks] >> TCdcStreamTests::StreamOnIndexTableNegative [GOOD] >> TCdcStreamTests::StreamOnIndexTable |60.5%| [TS] {RESULT} ydb/tests/stress/statistics_workload/import_test |60.5%| [TM] {RESULT} ydb/library/yql/providers/pq/provider/ut/unittest |60.5%| [TS] {RESULT} ydb/tests/tools/nemesis/driver/flake8 |60.5%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.5%| [TS] {RESULT} ydb/tests/functional/ydb_cli/flake8 |60.5%| [TS] {RESULT} ydb/tests/library/clients/py2_flake8 |60.5%| [TS] {RESULT} ydb/tests/stress/olap_workload/tests/flake8 |60.5%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part13/flake8 |60.5%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part9/py2_flake8 >> test.py::test[hor_join-yield_on-default.txt-ForceBlocks] [GOOD] >> test.py::test[hor_join-yield_on-default.txt-Results] >> test.py::test[join-mergejoin_force_per_link-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_force_per_link-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_narrows_output_sort-off-ForceBlocks] >> TCdcStreamTests::CreateStream [GOOD] >> TCdcStreamTests::AlterStream >> test.py::test[schema-insert_sorted-read_schema-Results] [GOOD] >> test.py::test[schema-select_all-yamred_dsv-Results] [GOOD] >> TPQCachingProxyTest::TestPublishAndForget >> test.py::test[schema-select_with_map-partial_read_schema-ForceBlocks] |60.5%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/mysql/flake8 >> test.py::test[schema-select_field-read_schema-Results] >> test.py::test[select-where_in-default.txt-Results] [GOOD] >> test.py::test[seq_mode-shared_subquery_expr_after_commit-default.txt-Results] |60.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> test.py::test[join-left_cast_to_string--Results] [GOOD] >> TCdcStreamTests::StreamOnIndexTable [GOOD] >> TCdcStreamTests::StreamOnBuildingIndexTable |60.6%| [TS] {RESULT} ydb/library/yaml_config/validator/ut/validator/unittest |60.6%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part7/py2_flake8 |60.6%| [TS] {RESULT} ydb/tests/library/compatibility/downloader/flake8 |60.6%| [TS] {RESULT} ydb/core/fq/libs/signer/ut/unittest |60.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes >> test.py::test[join-left_semi_with_other--ForceBlocks] >> test.py::test[union_all-union_all_subexpr-default.txt-Results] [GOOD] >> test.py::test[weak_field-weak_field_type-default.txt-ForceBlocks] >> TPQCachingProxyTest::TestPublishAndForget [GOOD] >> test.py::test[order_by-assume_cut_prefix--Results] [GOOD] >> test.py::test[order_by-order_by_tuple-default.txt-ForceBlocks] |60.6%| [TS] {RESULT} ydb/core/base/generated/ut/unittest |60.6%| [TS] {RESULT} ydb/core/blobstorage/base/ut/gtest |60.6%| [TS] {RESULT} ydb/tests/functional/sqs/common/flake8 |60.6%| [TS] {RESULT} ydb/tests/functional/sqs/multinode/flake8 |60.6%| [TS] {RESULT} ydb/tests/functional/restarts/import_test |60.6%| [TA] {RESULT} $(B)/ydb/library/yql/dq/actors/spilling/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.6%| [TS] {RESULT} ydb/public/tools/ydb_recipe/flake8 |60.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |60.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots >> TCdcStreamTests::AlterStream [GOOD] >> TCdcStreamTests::DropStream |60.6%| [LD] {RESULT} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |60.6%| [TS] {RESULT} ydb/tests/stress/kv/tests/import_test ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestPublishAndForget [GOOD] Test command err: 2025-07-08T11:56:23.122276Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-07-08T11:56:23.122305Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-07-08T11:56:23.126139Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T11:56:23.126171Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2025-07-08T11:56:23.126191Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2025-07-08T11:56:23.126198Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-07-08T11:56:23.126214Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: forget read: 1 for session session1 |60.6%| [TS] {RESULT} ydb/tests/functional/ttl/flake8 |60.6%| [TS] {RESULT} ydb/tests/stability/tool/import_test |60.6%| [TS] {RESULT} ydb/apps/dstool/flake8 |60.7%| [TS] {RESULT} ydb/tests/library/ut/import_test |60.7%| [TS] {RESULT} ydb/tests/stress/oltp_workload/tests/flake8 >> TCdcStreamTests::StreamOnBuildingIndexTable [GOOD] >> test.py::test[order_by-order_by_tuple_and_member-default.txt-Results] [GOOD] >> test.py::test[order_by-singular-default.txt-ForceBlocks] >> TCdcStreamWithInitialScanTests::InitialScanEnabled |60.7%| [TA] {RESULT} $(B)/ydb/core/fq/libs/compute/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.7%| [TS] {RESULT} ydb/tests/functional/minidumps/import_test |60.7%| [TS] {RESULT} ydb/tests/fq/plans/import_test |60.7%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part8/py2_flake8 |60.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |60.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |60.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |60.7%| [TS] {RESULT} ydb/tests/functional/benchmarks_init/import_test |60.7%| [TS] {RESULT} ydb/tests/olap/scenario/import_test |60.7%| [TS] {RESULT} ydb/core/config/validation/auth_config_validator_ut/unittest |60.7%| [TS] {RESULT} ydb/library/benchmarks/runner/result_compare/flake8 >> test.py::test[blocks-pg_top_sort--Results] [GOOD] >> test.py::test[blocks-string_as_agg_key--ForceBlocks] |60.7%| [LD] {RESULT} $(B)/ydb/core/formats/arrow/accessor/sparsed/ut/ydb-core-formats-arrow-accessor-sparsed-ut >> TCdcStreamWithInitialScanTests::InitialScanEnabled [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanDisabled |60.7%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/flake8 |60.7%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/huge/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |60.7%| [TS] {RESULT} ydb/tests/functional/restarts/flake8 |60.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part17/flake8 |60.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |60.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part15/flake8 |60.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |60.8%| [TM] {RESULT} ydb/library/union_copy_set/ut/unittest >> TCdcStreamWithInitialScanTests::InitialScanDisabled [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanProgress |60.8%| [TS] {RESULT} ydb/library/yaml_config/static_validator/ut/unittest |60.8%| [TS] {RESULT} ydb/public/tools/lib/cmds/ut/py3test |60.8%| [TS] {RESULT} ydb/tests/functional/canonical/flake8 |60.8%| [TM] {RESULT} ydb/core/driver_lib/version/ut/unittest |60.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt >> test.py::test[view-all_from_view--ForceBlocks] [GOOD] >> test.py::test[view-all_from_view--Results] >> TCdcStreamTests::DropStream [GOOD] >> TCdcStreamTests::AlterStreamImplShouldFail |60.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest |60.8%| [TS] {RESULT} ydb/tests/stress/transfer/tests/flake8 |60.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut >> TPQCachingProxyTest::MultipleSessions >> test.py::test[join-mergejoin_narrows_output_sort--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_narrows_output_sort--Results] |60.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |60.8%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/flake8 >> test.py::test[aggregate-group_by_session_star--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_session_star--Results] |60.8%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part18/py2_flake8 |60.8%| [TS] {RESULT} ydb/tests/fq/generic/analytics/flake8 |60.8%| [TS] {RESULT} ydb/tests/functional/wardens/import_test >> TCdcStreamWithInitialScanTests::InitialScanProgress [GOOD] >> TCdcStreamWithInitialScanTests::WithoutPqTransactions >> TPQCachingProxyTest::MultipleSessions [GOOD] |60.8%| [TS] {RESULT} ydb/public/tools/lib/cmds/ut/flake8 |60.8%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part13/py2_flake8 >> test.py::test[aggregate-group_by_cube_join_count--Results] [GOOD] >> TCdcStreamTests::AlterStreamImplShouldFail [GOOD] >> TCdcStreamTests::DropStreamImplShouldFail |60.8%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part5/py2_flake8 |60.8%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part4/py2_flake8 |60.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part19/flake8 |60.8%| [TS] {RESULT} ydb/tests/olap/ttl_tiering/import_test >> test.py::test[aggregate-group_by_expr_semi_join--ForceBlocks] >> test.py::test[hor_join-yield_on-default.txt-Results] [GOOD] >> test.py::test[in-in_ansi_join--ForceBlocks] |60.8%| [TS] {RESULT} ydb/tests/functional/blobstorage/flake8 |60.9%| [TS] {RESULT} ydb/library/actors/http/ut/unittest >> TCdcStreamTests::DropStreamImplShouldFail [GOOD] >> TCdcStreamTests::CopyTableShouldNotCopyStream |60.9%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part2/py2_flake8 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::MultipleSessions [GOOD] Test command err: 2025-07-08T11:56:25.050808Z 
node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-07-08T11:56:25.050834Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-07-08T11:56:25.054194Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T11:56:25.054216Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2025-07-08T11:56:25.054229Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2025-07-08T11:56:25.054236Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 2 for session: session1 2025-07-08T11:56:25.054242Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-07-08T11:56:25.054252Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 2 for session session1, Generation: 1 2025-07-08T11:56:25.054260Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session2:1 with generation 2 2025-07-08T11:56:25.054267Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 3 for session: session2 2025-07-08T11:56:25.054271Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 3 for session session2, Generation: 2 |60.9%| [TS] {RESULT} ydb/tests/functional/suite_tests/flake8 |60.9%| [TS] {RESULT} ydb/library/yaml_config/tools/simple_json_diff/flake8 |60.9%| [TS] {RESULT} ydb/library/actors/cppcoro/corobenchmark/corobenchmark |60.9%| [TS] {RESULT} ydb/tests/fq/generic/streaming/flake8 |60.9%| [TM] {RESULT} ydb/core/tablet_flat/ut_util/unittest |60.9%| [TS] {RESULT} ydb/tests/tools/pq_read/test/flake8 |60.9%| [TS] {RESULT} ydb/library/benchmarks/runner/result_convert/import_test |60.9%| [TS] {RESULT} ydb/core/config/validation/ut/unittest |60.9%| [TM] {RESULT} ydb/library/yql/tests/sql/solomon/pytest |60.9%| [TS] {RESULT} ydb/library/actors/core/harmonizer/ut/unittest >> test.py::test[pg-select_qstarref1-default.txt-Results] [GOOD] >> test.py::test[blocks-pg_call--ForceBlocks] [GOOD] >> test.py::test[pg-select_starref1-default.txt-Results] |60.9%| [TS] {RESULT} ydb/core/formats/arrow/ut/unittest |60.9%| [TS] {RESULT} ydb/library/yaml_config/ut_transform/import_test >> test.py::test[blocks-pg_call--Results] >> test.py::test[select-table_content_from_sort_desc-default.txt-Results] [GOOD] >> test.py::test[column_order-select_subquery-default.txt-ForceBlocks] [GOOD] |60.9%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part17/py2_flake8 |60.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part10/flake8 |60.9%| [TS] {RESULT} ydb/tests/olap/load/flake8 >> test.py::test[limit-insert_with_limit--ForceBlocks] [GOOD] >> test.py::test[limit-insert_with_limit--Results] >> test.py::test[view-all_from_view--Results] [GOOD] >> test.py::test[view-file_inner_library--ForceBlocks] >> test.py::test[select-where_with_lambda--Results] >> TCdcStreamTests::CopyTableShouldNotCopyStream [GOOD] >> TCdcStreamTests::MoveTableShouldFail >> test.py::test[column_order-select_subquery-default.txt-Results] >> TCdcStreamWithInitialScanTests::WithoutPqTransactions [GOOD] >> TCdcStreamWithInitialScanTests::WithPqTransactions |60.9%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part7/py2_flake8 |60.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/dread_cache_service/ut/unittest |60.9%| [TS] {RESULT} ydb/tests/fq/mem_alloc/flake8 >> ErasureBrandNew::Block42_restore [GOOD] >> 
ErasureBrandNew::Block42_restore_benchmark >> test.py::test[optimizers-passthrough_sortness_over_map-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-passthrough_sortness_over_map-default.txt-Results] >> test.py::test[schema-select_with_map-partial_read_schema-ForceBlocks] [GOOD] >> test.py::test[schema-select_with_map-partial_read_schema-Results] >> test.py::test[schema-select_field-read_schema-Results] [GOOD] >> test.py::test[schema-select_simple-default.txt-Results] |61.0%| [TS] {RESULT} ydb/tests/sql/large/flake8 |61.0%| [TS] {RESULT} ydb/core/formats/arrow/accessor/sparsed/ut/unittest |61.0%| [TS] {RESULT} ydb/tests/functional/sqs/common/import_test |61.0%| [TS] {RESULT} ydb/tests/library/wardens/flake8 |61.0%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part12/py2_flake8 |61.0%| [TS] {RESULT} ydb/tests/fq/solomon/flake8 |61.0%| [TS] {RESULT} ydb/tests/tools/kqprun/tests/flake8 >> test.py::test[seq_mode-shared_subquery_expr_after_commit-default.txt-Results] [GOOD] >> test.py::test[tpch-q16-default.txt-Results] >> TCdcStreamTests::MoveTableShouldFail [GOOD] >> TCdcStreamTests::CheckSchemeLimits |61.0%| [TS] {RESULT} ydb/library/yaml_config/static_validator/ut/example_configs/unittest |61.0%| [TS] {RESULT} ydb/tests/fq/generic/streaming/black |61.0%| [TM] {RESULT} ydb/library/yaml_config/ut_transform/py3test |61.0%| [TS] {RESULT} ydb/tests/functional/minidumps/flake8 |61.0%| [TS] {RESULT} ydb/tests/functional/large_serializable/flake8 >> test.py::test[order_by-order_by_tuple-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_tuple-default.txt-Results] |61.0%| [TS] {RESULT} ydb/core/tx/sequenceshard/public/ut/unittest |61.0%| [TS] {RESULT} ydb/core/config/validation/column_shard_config_validator_ut/unittest |61.0%| [TS] {RESULT} ydb/tests/library/sqs/flake8 >> test.py::test[weak_field-weak_field_type-default.txt-ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_type-default.txt-Results] |61.0%| [TS] {RESULT} ydb/tests/fq/plans/flake8 |61.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part12/flake8 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-25 >> test.py::test[join-left_semi_with_other--ForceBlocks] [GOOD] >> test.py::test[join-left_semi_with_other--Results] |61.0%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/flake8 |61.0%| [TS] {RESULT} ydb/tests/functional/sqs/large/import_test |61.0%| [TM] {RESULT} ydb/core/blobstorage/backpressure/ut_client/unittest |61.1%| [TS] {RESULT} ydb/core/viewer/json/ut/unittest >> test.py::test[join-star_join_inners--Results] [GOOD] >> test.py::test[join-star_join_mirror-off-Results] [SKIPPED] >> test.py::test[join-yql-8131--Results] [SKIPPED] >> test.py::test[key_filter-contains_tuples-default.txt-Results] >> TCdcStreamWithInitialScanTests::WithPqTransactions [GOOD] >> TCdcStreamWithInitialScanTests::AlterStream |61.1%| [TS] {RESULT} ydb/library/workload/tpch/ut/unittest |61.1%| [LD] {RESULT} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |61.1%| [TS] {RESULT} ydb/library/yql/dq/opt/ut/unittest |61.1%| [TS] {RESULT} ydb/library/benchmarks/runner/run_tests/import_test >> test.py::test[join-mergejoin_narrows_output_sort-off-ForceBlocks] [GOOD] >> test.py::test[blocks-pg_call--Results] [GOOD] >> test.py::test[column_order-select_subquery-default.txt-Results] [GOOD] >> test.py::test[count-count_all_grouped-empty-ForceBlocks] >> test.py::test[blocks-pg_to_interval--ForceBlocks] >> test.py::test[limit-insert_with_limit--Results] [GOOD] >> 
test.py::test[lineage-error_type--ForceBlocks] |61.1%| [TS] {RESULT} ydb/tests/functional/scheme_tests/flake8 >> test.py::test[join-mergejoin_narrows_output_sort-off-Results] [SKIPPED] |61.1%| [TS] {RESULT} ydb/tests/functional/suite_tests/import_test |61.1%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part2/flake8 >> test.py::test[join-premap_common_cross--ForceBlocks] >> TCdcStreamTests::CheckSchemeLimits [GOOD] >> TCdcStreamTests::MeteringServerless >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-31 >> test.py::test[lineage-error_type--ForceBlocks] [SKIPPED] >> test.py::test[lineage-error_type--Results] [SKIPPED] >> test.py::test[aggregate-group_by_session_star--Results] [GOOD] >> test.py::test[ansi_idents-order_by-default.txt-ForceBlocks] >> test.py::test[schema-select_with_map-partial_read_schema-Results] [GOOD] >> test.py::test[schema-select_yamr_fields--ForceBlocks] >> test.py::test[lineage-select_field-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_field-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_nested_table_row-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_nested_table_row-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-reduce_with_aux_sort_column--ForceBlocks] |61.1%| [TS] {RESULT} ydb/tests/stress/simple_queue/tests/import_test |61.1%| [TS] {RESULT} ydb/tests/olap/lib/flake8 >> TCdcStreamWithInitialScanTests::AlterStream [GOOD] >> TCdcStreamWithInitialScanTests::DropStream >> test.py::test[order_by-singular-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-singular-default.txt-Results] |61.1%| [TS] {RESULT} ydb/tests/library/test_meta/flake8 |61.1%| [TS] {RESULT} ydb/core/external_sources/ut/unittest >> test.py::test[column_order-insert_with_new_cols--ForceBlocks] [GOOD] >> test.py::test[column_order-insert_with_new_cols--Results] >> test.py::test[join-mergejoin_narrows_output_sort--Results] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort-off-ForceBlocks] |61.1%| [TS] {RESULT} ydb/tests/postgres_integrations/library/ut/import_test >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-26 |61.1%| [TS] {RESULT} ydb/tests/functional/compatibility/flake8 |61.1%| [TS] {RESULT} ydb/library/benchmarks/runner/runner/flake8 |61.1%| [TS] {RESULT} ydb/tests/tools/pq_read/test/import_test |61.1%| [TS] {RESULT} ydb/tests/fq/restarts/flake8 |61.1%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part9/flake8 |61.1%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part6/flake8 |61.1%| [TS] {RESULT} ydb/library/yql/providers/s3/common/ut/unittest >> test.py::test[blocks-string_as_agg_key--ForceBlocks] [GOOD] >> test.py::test[blocks-string_as_agg_key--Results] |61.2%| [TS] {RESULT} ydb/tests/tools/ydb_serializable/flake8 |61.2%| [LD] {RESULT} $(B)/yql/tools/yqlrun/yqlrun |61.2%| [TM] {RESULT} ydb/library/intersection_tree/ut/unittest >> TCdcStreamWithInitialScanTests::DropStream [GOOD] >> TCdcStreamWithInitialScanTests::RacyAlterStreamAndRestart >> test.py::test[order_by-order_by_tuple-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_udf_duo--ForceBlocks] |61.2%| [TS] {RESULT} ydb/public/tools/local_ydb/flake8 |61.2%| [TS] {RESULT} ydb/core/formats/arrow/accessor/composite/ut/unittest >> test.py::test[weak_field-weak_field_type-default.txt-Results] [GOOD] >> test.py::test[window-full/leadlag_compact--ForceBlocks] |61.2%| [TA] {RESULT} 
$(B)/ydb/core/base/ut_auth/test-results/unittest/{meta.json ... results_accumulator.log} |61.2%| [TS] {RESULT} ydb/tests/library/serializability/flake8 |61.2%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/ydb/flake8 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-31 [GOOD] >> test.py::test[aggr_factory-avg_if-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-multi--Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-32 |61.2%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part11/flake8 |61.2%| [TS] {RESULT} ydb/tools/tstool/flake8 |61.2%| [TS] {RESULT} ydb/tests/functional/query_cache/flake8 |61.2%| [TS] {RESULT} ydb/tests/tools/kqprun/recipe/flake8 |61.2%| [TS] {RESULT} ydb/tests/olap/import_test |61.2%| [TS] {RESULT} ydb/tests/functional/audit/flake8 >> test.py::test[optimizers-passthrough_sortness_over_map-default.txt-Results] [GOOD] >> test.py::test[pg-select_starref1-default.txt-Results] [GOOD] >> test.py::test[pg-table_func-default.txt-Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-27 >> test.py::test[view-file_inner_library--ForceBlocks] [GOOD] >> test.py::test[view-file_inner_library--Results] >> test.py::test[optimizers-yql-10737_lost_passthrough-default.txt-ForceBlocks] |61.2%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/import_test |61.2%| [TS] {RESULT} ydb/tests/functional/autoconfig/flake8 |61.2%| [TS] {RESULT} ydb/tests/functional/encryption/flake8 |61.2%| [TS] {RESULT} ydb/tests/functional/tenants/flake8 |61.2%| [TS] {RESULT} ydb/library/yql/providers/s3/actors/ut/unittest >> TCdcStreamWithInitialScanTests::RacyAlterStreamAndRestart [GOOD] >> TCdcStreamWithInitialScanTests::MeteringServerless >> test.py::test[select-where_with_lambda--Results] [GOOD] |61.2%| [LD] {RESULT} $(B)/ydb/library/yql/providers/s3/actors/ut/ydb-library-yql-providers-s3-actors-ut |61.2%| [TA] {RESULT} $(B)/ydb/tests/library/test-results/flake8/{meta.json ... 
results_accumulator.log} >> ErasureBrandNew::Block42_restore_benchmark [GOOD] >> test.py::test[seq_mode-action_shared_subquery_expr_after_commit-default.txt-Results] |61.3%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part10/py2_flake8 |61.3%| [TS] {RESULT} ydb/tests/functional/script_execution/flake8 |61.3%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/fifo/import_test >> test.py::test[order_by-singular-default.txt-Results] [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-33 >> test.py::test[aggregate-group_by_expr_semi_join--ForceBlocks] [GOOD] >> test.py::test[pg-aggregate_combine--ForceBlocks] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-27 [GOOD] >> TCdcStreamWithInitialScanTests::MeteringServerless [GOOD] >> test.py::test[aggregate-group_by_expr_semi_join--Results] >> TCdcStreamWithInitialScanTests::MeteringDedicated >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-28 |61.3%| [TS] {RESULT} ydb/core/config/init/ut/unittest |61.3%| [TS] {RESULT} ydb/tests/functional/cms/flake8 |61.3%| [TS] {RESULT} ydb/core/client/metadata/ut/unittest |61.3%| [TS] {RESULT} ydb/core/fq/libs/db_id_async_resolver_impl/ut/unittest |61.3%| [TS] {RESULT} ydb/library/yql/providers/s3/object_listers/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> ErasureBrandNew::Block42_restore_benchmark [GOOD] Test command err: totalSize# 500341636 period1# 1.472542s period2# 0.649364s MB/s1# 339.7808932 MB/s2# 770.5102777 factor# 2.267668057 >> test.py::test[view-file_inner_library--Results] [GOOD] >> test.py::test[column_order-insert_with_new_cols--Results] [GOOD] >> test.py::test[weak_field-weak_field_infer_scheme--ForceBlocks] >> test.py::test[schema-select_simple-default.txt-Results] [GOOD] >> test.py::test[count-count_all_grouped-empty-ForceBlocks] [GOOD] >> test.py::test[count-count--ForceBlocks] >> test.py::test[count-count_all_grouped-empty-Results] >> test.py::test[schema-select_with_map-sorted_desc-Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-34 >> test.py::test[schema-select_yamr_fields--ForceBlocks] [GOOD] |61.3%| [TM] {RESULT} ydb/library/actors/interconnect/ut/unittest |61.3%| [TS] {RESULT} ydb/tests/functional/compatibility/import_test >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-29 >> test.py::test[schema-select_yamr_fields--Results] |61.3%| [TS] {RESULT} ydb/tests/postgres_integrations/library/ut/py3test |61.3%| [TS] {RESULT} ydb/tests/stress/simple_queue/tests/flake8 |61.3%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part9/py2_flake8 |61.3%| [TS] {RESULT} ydb/library/benchmarks/runner/import_test |61.3%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part15/py2_flake8 |61.3%| [TS] {RESULT} ydb/library/actors/wilson/ut/unittest |61.3%| [TS] {RESULT} ydb/core/blobstorage/crypto/ut/unittest >> test.py::test[join-left_semi_with_other--Results] [GOOD] >> test.py::test[join-lookupjoin_semi_1o--ForceBlocks] |61.3%| [TA] {RESULT} $(B)/ydb/tests/library/test-results/py2_flake8/{meta.json ... 
results_accumulator.log} |61.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part4/flake8 |61.4%| [TS] {RESULT} ydb/library/yaml_config/validator/ut/validator_checks/unittest >> test.py::test[blocks-pg_to_interval--ForceBlocks] [GOOD] >> test.py::test[blocks-pg_to_interval--Results] >> test.py::test[blocks-date_equals--ForceBlocks] [GOOD] >> test.py::test[blocks-date_equals--Results] >> TPDiskRaces::KillOwnerWhileDecommittingWithInflight [GOOD] >> TPDiskRaces::KillOwnerWhileDecommittingWithInflightMock |61.4%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part0/py2_flake8 |61.4%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.4%| [TS] {RESULT} ydb/tests/tools/kqprun/recipe/import_test |61.4%| [TS] {RESULT} ydb/tests/fq/yds/import_test |61.4%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part16/flake8 >> test.py::test[blocks-string_as_agg_key--Results] [GOOD] >> test.py::test[blocks-string_filter--ForceBlocks] >> test.py::test[ansi_idents-order_by-default.txt-ForceBlocks] [GOOD] >> test.py::test[ansi_idents-order_by-default.txt-Results] |61.4%| [TS] {RESULT} ydb/tests/functional/api/import_test |61.4%| [TS] {RESULT} ydb/tests/tools/kqprun/tests/import_test |61.4%| [TS] {RESULT} ydb/core/formats/arrow/accessor/sub_columns/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-43 |61.4%| [TS] {RESULT} ydb/core/tx/scheme_board/ut_double_indexed/unittest |61.4%| [TS] {RESULT} ydb/tests/functional/sqs/cloud/flake8 |61.4%| [TS] {RESULT} ydb/core/debug_tools/ut/unittest |61.4%| [TS] {RESULT} ydb/tests/functional/api/flake8 |61.4%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part8/py2_flake8 |61.4%| [TS] {RESULT} ydb/library/yql/providers/dq/runtime/ut/unittest |61.4%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part1/py2_flake8 |61.4%| [TS] {RESULT} ydb/library/yql/providers/generic/provider/ut/pushdown/unittest |61.4%| [TS] {RESULT} ydb/library/yql/providers/s3/provider/ut/unittest |61.4%| [TS] {RESULT} ydb/core/fq/libs/hmac/ut/unittest >> test.py::test[count-count_all_grouped-empty-Results] [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-29 [GOOD] >> test.py::test[dq-precompute_parallel_indep--ForceBlocks] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-30 |61.4%| [TS] {RESULT} ydb/core/metering/ut/unittest |61.5%| [TS] {RESULT} ydb/tests/fq/http_api/import_test |61.5%| [TS] {RESULT} ydb/tests/functional/tpc/medium/flake8 |61.5%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part10/py2_flake8 |61.5%| [TS] {RESULT} ydb/core/pgproxy/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-35 |61.5%| [TS] {RESULT} ydb/services/persqueue_cluster_discovery/cluster_ordering/ut/unittest |61.5%| [TS] {RESULT} ydb/library/yql/utils/actor_system/clang_format >> test.py::test[join-premap_common_cross--ForceBlocks] [GOOD] >> test.py::test[join-premap_common_cross--Results] >> test.py::test[schema-select_yamr_fields--Results] [GOOD] >> test.py::test[select-if-default.txt-ForceBlocks] >> test.py::test[order_by-order_by_udf_duo--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_udf_duo--Results] >> TYardTest::TestLogWriteCutEqualRandomWait [GOOD] >> TYardTest::TestLogWriteCutUnequal |61.5%| [TS] {RESULT} ydb/tests/library/serializability/py2_flake8 |61.5%| [TS] {RESULT} 
ydb/tests/functional/postgresql/import_test |61.5%| [TS] {RESULT} ydb/public/tools/lib/cmds/ut/import_test |61.5%| [TS] {RESULT} ydb/tests/library/compatibility/downloader/import_test >> test.py::test[blocks-pg_to_interval--Results] [GOOD] >> test.py::test[blocks-sort_two_asc--ForceBlocks] |61.5%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part14/py2_flake8 |61.5%| [TA] {RESULT} $(B)/ydb/library/yql/providers/common/http_gateway/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_mirror3of4/ydb-core-blobstorage-ut_mirror3of4 |61.5%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/query/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.5%| [TS] {RESULT} ydb/tests/stress/kv/tests/flake8 |61.5%| [TS] {RESULT} ydb/library/yaml_config/ut/unittest |61.5%| [TS] {RESULT} ydb/core/external_sources/object_storage/inference/ut/gtest |61.5%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/join/flake8 |61.5%| [TS] {RESULT} ydb/library/formats/arrow/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-31 |61.5%| [TS] {RESULT} ydb/tests/functional/sqs/large/flake8 >> test.py::test[aggregate-group_by_expr_semi_join--Results] [GOOD] |61.6%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/oracle/flake8 |61.6%| [TS] {RESULT} ydb/tests/functional/benchmarks_init/flake8 |61.6%| [TS] {RESULT} ydb/apps/dstool/import_test |61.6%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part8/flake8 |61.6%| [TS] {RESULT} ydb/tests/olap/flake8 >> test.py::test[aggregate-group_by_ru_join_star-default.txt-ForceBlocks] >> test.py::test[ansi_idents-order_by-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_all_count_filter--ForceBlocks] >> test.py::test[optimizers-yql-10737_lost_passthrough-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-10737_lost_passthrough-default.txt-Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-44 |61.6%| [TS] {RESULT} ydb/tests/olap/ttl_tiering/flake8 |61.6%| [TS] {RESULT} ydb/tests/fq/multi_plane/flake8 |61.6%| [LD] {RESULT} $(B)/ydb/library/actors/wilson/ut/ydb-library-actors-wilson-ut |61.6%| [TS] {RESULT} ydb/tests/functional/rename/import_test |61.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/common/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |61.6%| [TS] {RESULT} ydb/library/benchmarks/runner/result_compare/import_test >> test.py::test[seq_mode-action_shared_subquery_expr_after_commit-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_bug8923-default.txt-Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-36 >> test.py::test[key_filter-contains_tuples-default.txt-Results] [GOOD] >> test.py::test[key_filter-empty_range--Results] >> test.py::test[order_by-order_by_udf_duo--Results] [GOOD] >> test.py::test[pg-aggregate_combine--ForceBlocks] [GOOD] |61.6%| [TS] {RESULT} ydb/core/blobstorage/vdisk/defrag/ut/unittest >> test.py::test[optimizers-reduce_with_aux_sort_column--ForceBlocks] [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-31 [GOOD] >> test.py::test[window-full/leadlag_compact--ForceBlocks] [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-44 [GOOD] >> test.py::test[weak_field-weak_field_infer_scheme--ForceBlocks] [GOOD] >> test.py::test[order_by-presort_order_by_table-default.txt-ForceBlocks] >> test.py::test[optimizers-reduce_with_aux_sort_column--Results] >> test.py::test[pg-aggregate_combine--Results] >> test.py::test[weak_field-weak_field_infer_scheme--Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-32 >> test.py::test[window-full/leadlag_compact--Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-45 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-37 |61.6%| [TS] {RESULT} ydb/tests/functional/tpc/large/flake8 |61.6%| [TS] {RESULT} ydb/core/tx/long_tx_service/public/ut/unittest >> test.py::test[join-premap_common_cross--Results] [GOOD] >> test.py::test[join-premap_common_inner_filter-off-ForceBlocks] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-32 [GOOD] >> test.py::test[schema-select_with_map-sorted_desc-Results] [GOOD] >> test.py::test[schema-skip_complex_type--Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-33 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-46 >> test.py::test[blocks-string_filter--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-10737_lost_passthrough-default.txt-Results] [GOOD] >> test.py::test[blocks-string_filter--Results] >> test.py::test[order_by-literal_take_zero_sort--ForceBlocks] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-37 [GOOD] |61.6%| [TS] {RESULT} ydb/tests/example/import_test >> TYardTest::TestLogWriteCutUnequal [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-38 >> test.py::test[join-lookupjoin_semi_1o--ForceBlocks] [GOOD] >> TYardTest::TestLogMultipleWriteRead >> test.py::test[join-lookupjoin_semi_1o--Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-34 |61.6%| [TS] {RESULT} ydb/tests/functional/ttl/import_test |61.6%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part6/py2_flake8 |61.6%| [TS] {RESULT} ydb/tests/stress/statistics_workload/flake8 |61.6%| [TS] {RESULT} ydb/tests/library/ut/flake8 |61.6%| [TM] 
{RESULT} ydb/core/blobstorage/ut_mirror3of4/unittest >> test.py::test[select-if-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-if-default.txt-Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-46 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-47 >> test.py::test[weak_field-weak_field_infer_scheme--Results] [GOOD] >> test.py::test[tpch-q16-default.txt-Results] [GOOD] >> test.py::test[tpch-q19-default.txt-Results] |61.6%| [TM] {RESULT} ydb/core/fq/libs/control_plane_storage/internal/ut/unittest |61.7%| [TS] {RESULT} ydb/tests/library/flavours/flake8 |61.7%| [LD] {RESULT} $(B)/ydb/core/pgproxy/ut/ydb-core-pgproxy-ut >> TYardTest::TestLogMultipleWriteRead [GOOD] >> TYardTest::TestLogContinuityPersistence >> test.py::test[pg-aggregate_combine--Results] [GOOD] >> test.py::test[window-current/session_extended--ForceBlocks] |61.7%| [TS] {RESULT} ydb/core/viewer/tests/import_test |61.7%| [TS] {RESULT} ydb/tests/functional/postgresql/flake8 >> test.py::test[pg-select_starref1-default.txt-ForceBlocks] >> test.py::test[optimizers-reduce_with_aux_sort_column--Results] [GOOD] >> test.py::test[optimizers-sorted_scalar_content--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-sorted_scalar_content--Results] [SKIPPED] >> test.py::test[optimizers-sorted_sql_in--ForceBlocks] >> test.py::test[blocks-sort_two_asc--ForceBlocks] [GOOD] >> test.py::test[blocks-sort_two_asc--Results] |61.7%| [TS] {RESULT} ydb/core/config/ut/unittest |61.7%| [TS] {RESULT} ydb/tests/functional/serializable/flake8 |61.7%| [TS] {RESULT} ydb/mvp/oidc_proxy/ut/unittest |61.7%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part5/py2_flake8 |61.7%| [TS] {RESULT} ydb/tests/functional/tenants/import_test >> test.py::test[optimizers-sorted_sql_in--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-sorted_sql_in--Results] [SKIPPED] |61.8%| [TS] {RESULT} ydb/tests/olap/docs/generator/import_test |61.8%| [TS] {RESULT} ydb/core/log_backend/ut/unittest |61.8%| [TS] {RESULT} ydb/core/fq/libs/metrics/ut/unittest >> test.py::test[blocks-date_equals--Results] [GOOD] >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt-ForceBlocks] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-39 >> test.py::test[blocks-date_greater_or_equal_scalar--ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_coalesce_bug8923-default.txt-Results] [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-35 >> TYardTest::TestLogContinuityPersistence [GOOD] >> TYardTest::TestLogContinuityPersistenceLarge >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_enable-default.txt-Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-48 >> TCdcStreamWithInitialScanTests::MeteringDedicated [GOOD] >> test.py::test[blocks-string_filter--Results] [GOOD] >> test.py::test[coalesce-coalesce_few_real-default.txt-ForceBlocks] >> test.py::test[blocks-combine_all_count_filter--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_count_filter--Results] >> test.py::test[aggregate-group_by_ru_join_star-default.txt-ForceBlocks] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> TCdcStreamWithInitialScanTests::MeteringDedicated [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:56:16.862361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:56:16.862388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:56:16.862394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:56:16.862399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:56:16.862410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:56:16.862414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:56:16.862423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:56:16.862435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:56:16.862504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:56:16.875084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:56:16.875103Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:56:16.878858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:56:16.878893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:56:16.878912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:56:16.880452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:56:16.880487Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:56:16.880675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:56:16.880883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:56:16.882784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:56:16.882816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:56:16.882987Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:56:16.882994Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:56:16.883007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Execute 2025-07-08T11:56:16.883012Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:56:16.883017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:56:16.883034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:56:16.884373Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:56:16.921006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:56:16.921088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:56:16.921138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:56:16.921183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:56:16.921196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:56:16.922267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:56:16.922294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:56:16.922342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:56:16.922357Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:56:16.922362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:56:16.922369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:56:16.922726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:56:16.922736Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:56:16.922741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:56:16.923024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:56:16.923032Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:56:16.923037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 
72057594046678944 2025-07-08T11:56:16.923044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:56:16.923611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:56:16.923930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:56:16.923963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:56:16.924123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:56:16.924145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:56:16.924151Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:56:16.924208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:56:16.924215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:56:16.924240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:56:16.924250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:56:16.924588Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:56:16.924595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:56:16.924631Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:56:16.924636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:56:16.924646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:56:16.924654Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:56:16.924664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:56:16.924668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:56:16.924673Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 
2025-07-08T11:56:16.924676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:56:16.924681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:56:16.924686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:56:16.924692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:56:16.924695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:56:16.924704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:56:16.924710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:56:16.924714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:56:16.925101Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:56:16.925114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... IsReadyToDone TxId: 281474976715657 ready parts: 2/3 2025-07-08T11:56:30.334872Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 2/3 2025-07-08T11:56:30.334876Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 2/3 2025-07-08T11:56:30.334879Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: false 2025-07-08T11:56:30.334960Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2025-07-08T11:56:30.334973Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2025-07-08T11:56:30.334977Z node 19 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 281474976715657 2025-07-08T11:56:30.334983Z node 19 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 281474976715657, pathId: [OwnerId: 72075186233409546, LocalPathId: 3], version: 5 2025-07-08T11:56:30.334988Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 4 2025-07-08T11:56:30.335154Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2025-07-08T11:56:30.335166Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2025-07-08T11:56:30.335171Z node 19 :FLAT_TX_SCHEMESHARD INFO: 
Operation in-flight, at schemeshard: 72075186233409546, txId: 281474976715657 2025-07-08T11:56:30.335176Z node 19 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 281474976715657, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 5 2025-07-08T11:56:30.335180Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 6 2025-07-08T11:56:30.335200Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: true 2025-07-08T11:56:30.336144Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976715657 2025-07-08T11:56:30.336188Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976715657 2025-07-08T11:56:30.348924Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72075186233409546, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 250 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 240 } } 2025-07-08T11:56:30.348968Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409552, partId: 1 2025-07-08T11:56:30.348999Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 250 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 240 } } 2025-07-08T11:56:30.349016Z node 19 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72075186233409546, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 250 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 240 } } 2025-07-08T11:56:30.349291Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72075186233409546, at schemeshard: 72075186233409546, message: Source { RawX1: 754 RawX2: 81604381267 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-07-08T11:56:30.349299Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409552, partId: 1 2025-07-08T11:56:30.349312Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546, message: Source { RawX1: 754 RawX2: 81604381267 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-07-08T11:56:30.349320Z node 19 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72075186233409546 2025-07-08T11:56:30.349328Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72075186233409546 message: Source { RawX1: 754 RawX2: 81604381267 } Origin: 
72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-07-08T11:56:30.349342Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715657:1, shardIdx: 72075186233409546:4, shard: 72075186233409552, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72075186233409546 2025-07-08T11:56:30.349346Z node 19 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2025-07-08T11:56:30.349350Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715657:1, datashard: 72075186233409552, at schemeshard: 72075186233409546 2025-07-08T11:56:30.349358Z node 19 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:1 129 -> 240 2025-07-08T11:56:30.350259Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2025-07-08T11:56:30.350672Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2025-07-08T11:56:30.350749Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2025-07-08T11:56:30.350758Z node 19 :FLAT_TX_SCHEMESHARD INFO: [72075186233409546] TDone opId# 281474976715657:1 ProgressState 2025-07-08T11:56:30.350771Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:1 progress is 3/3 2025-07-08T11:56:30.350776Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-07-08T11:56:30.350780Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:1 progress is 3/3 2025-07-08T11:56:30.350783Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-07-08T11:56:30.350792Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 3/3, is published: true 2025-07-08T11:56:30.350797Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-07-08T11:56:30.350803Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-07-08T11:56:30.350808Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:0 2025-07-08T11:56:30.350820Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 3 2025-07-08T11:56:30.350824Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:1 2025-07-08T11:56:30.350827Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:1 2025-07-08T11:56:30.350842Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 5 2025-07-08T11:56:30.350846Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:2 2025-07-08T11:56:30.350850Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:2 2025-07-08T11:56:30.350854Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2025-07-08T11:56:33.214427Z node 19 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2025-07-08T11:56:33.214518Z node 19 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe pathId 4 took 108us result status StatusNameConflict 2025-07-08T11:56:33.214574Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/Shared/Table/Stream/streamImpl\', error: path is not a common path (id: [OwnerId: 72075186233409546, LocalPathId: 4], type: EPathTypePersQueueGroup, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/Shared/Table/Stream/streamImpl" PathId: 4 LastExistedPrefixPath: "/MyRoot/Shared/Table/Stream/streamImpl" LastExistedPrefixPathId: 4 LastExistedPrefixDescription { Self { Name: "streamImpl" PathId: 4 SchemeshardId: 72075186233409546 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 106 CreateStep: 200 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409554 } } PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 2025-07-08T11:56:35.750040Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2025-07-08T11:56:35.750121Z node 19 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe pathId 4 took 94us result status StatusNameConflict 2025-07-08T11:56:35.750162Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/Shared/Table/Stream/streamImpl\', error: path is not a common path (id: [OwnerId: 72075186233409546, LocalPathId: 4], type: EPathTypePersQueueGroup, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/Shared/Table/Stream/streamImpl" PathId: 4 LastExistedPrefixPath: "/MyRoot/Shared/Table/Stream/streamImpl" LastExistedPrefixPathId: 4 LastExistedPrefixDescription { Self { Name: "streamImpl" PathId: 4 SchemeshardId: 72075186233409546 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 106 CreateStep: 200 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409554 } } PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-48 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-1 >> test.py::test[aggregate-group_by_ru_join_star-default.txt-Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-40 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-36 >> test.py::test[join-lookupjoin_semi_1o--Results] [GOOD] >> test.py::test[join-lookupjoin_semi_empty-off-ForceBlocks] >> test.py::test[select-if-default.txt-Results] [GOOD] >> test.py::test[select-logical_ops-default.txt-ForceBlocks] >> test.py::test[dq-precompute_parallel_indep--ForceBlocks] [GOOD] >> 
test.py::test[order_by-presort_order_by_table-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-presort_order_by_table-default.txt-Results] >> test.py::test[dq-precompute_parallel_indep--Results] [SKIPPED] >> test.py::test[flatten_by-flatten_few_fields--ForceBlocks] >> test.py::test[window-full/leadlag_compact--Results] [GOOD] >> test.py::test[window-generic/aggregations_before_current--ForceBlocks] >> test.py::test[count-count--ForceBlocks] [GOOD] >> test.py::test[count-count--Results] >> test.py::test[blocks-sort_two_asc--Results] [GOOD] >> test.py::test[blocks-top_sort_two_asc--ForceBlocks] >> TYardTest::TestLogContinuityPersistenceLarge [GOOD] >> TYardTest::TestLogWriteLsnConsistency >> test.py::test[order_by-literal_take_zero_sort--ForceBlocks] [GOOD] >> test.py::test[order_by-literal_take_zero_sort--Results] >> test.py::test[join-mergejoin_saves_output_sort-off-ForceBlocks] [GOOD] >> test.py::test[schema-skip_complex_type--Results] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort-off-Results] [SKIPPED] >> test.py::test[schema-user_schema_mix1--Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-37 >> test.py::test[pg-table_func-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q07-default.txt-Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-41 >> test.py::test[join-mergejoin_saves_output_sort_unmatched--ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_saves_output_sort_unmatched--Results] [SKIPPED] >> TYardTest::TestLogWriteLsnConsistency [GOOD] >> TYardTest::TestLotsOfTinyAsyncLogLatency >> test.py::test[blocks-combine_all_count_filter--Results] [GOOD] >> test.py::test[blocks-combine_hashed_minmax_nested--ForceBlocks] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-2 >> test.py::test[join-premap_common_inner_filter-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_common_inner_filter-off-Results] [SKIPPED] >> test.py::test[join-pullup_cross--ForceBlocks] >> TYardTest::TestLotsOfTinyAsyncLogLatency [GOOD] >> TYardTest::TestLogLatency >> test.py::test[join-mergejoin_sorts_output_for_sort_nomatch--ForceBlocks] [SKIPPED] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-1 >> test.py::test[join-mergejoin_sorts_output_for_sort_nomatch--Results] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names_nested-off-ForceBlocks] >> test.py::test[pg-select_starref1-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_starref1-default.txt-Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-38 >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_enable-default.txt-Results] [GOOD] >> test.py::test[table_range-concat_sorted_max_sorted_tables--Results] [SKIPPED] >> test.py::test[table_range-limit_with_table_path_over_sorted_range--Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-42 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-2 >> 
TYardTest::TestLogLatency [GOOD] >> TYardTest::TestMultiYardFirstRecordToKeep >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-3 >> test.py::test[order_by-presort_order_by_table-default.txt-Results] [GOOD] >> test.py::test[pg-select_from_columns_star-default.txt-ForceBlocks] >> test.py::test[order_by-literal_take_zero_sort--Results] [GOOD] >> test.py::test[order_by-order_by_value_desc-default.txt-ForceBlocks] >> TYardTest::TestMultiYardFirstRecordToKeep [GOOD] >> TYardTest::TestLogOverwriteRestarts >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-39 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-3 >> SharedThreads::RegistrationAndPassingAwayActorsStrictPool [GOOD] >> SharedThreads::RegistrationAndPassingAwayActorsTailStrictPool >> test.py::test[coalesce-coalesce_few_real-default.txt-ForceBlocks] [GOOD] >> test.py::test[coalesce-coalesce_few_real-default.txt-Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-43 >> test.py::test[pg-select_starref1-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q12-default.txt-ForceBlocks] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-4 >> test.py::test[join-lookupjoin_semi_empty-off-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_semi_empty-off-Results] >> test.py::test[join-lookupjoin_semi_empty-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_early_rewrite_star-off-ForceBlocks] >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt-Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-40 >> test.py::test[tpch-q19-default.txt-Results] [GOOD] >> test.py::test[type_v3-insert_struct_v3_with_native--Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-4 >> test.py::test[select-logical_ops-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-logical_ops-default.txt-Results] >> test.py::test[aggregate-group_by_ru_join_star-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_ru_with_window_func--ForceBlocks] >> test.py::test[window-current/session_extended--ForceBlocks] [GOOD] >> test.py::test[window-current/session_extended--Results] >> test.py::test[schema-user_schema_mix1--Results] [GOOD] >> test.py::test[select-autoextract_source_value-default.txt-Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-44 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-5 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-5 >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-41 >> test.py::test[pg-tpcds-q07-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q14-default.txt-Results] >> test.py::test[flatten_by-flatten_few_fields--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_few_fields--Results] >> test.py::test[in-in_ansi_join--ForceBlocks] [GOOD] >> test.py::test[in-in_ansi_join--Results] >> test.py::test[coalesce-coalesce_few_real-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_columns_after_group-default.txt-ForceBlocks] >> test.py::test[blocks-top_sort_two_asc--ForceBlocks] [GOOD] >> test.py::test[blocks-top_sort_two_asc--Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-45 >> test.py::test[blocks-combine_hashed_minmax_nested--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_minmax_nested--Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-6 >> test.py::test[select-logical_ops-default.txt-Results] [GOOD] >> test.py::test[select-multi_source_issue-default.txt-ForceBlocks] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42 >> test.py::test[join-pullup_cross--ForceBlocks] [GOOD] >> test.py::test[join-pullup_cross--Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-6 >> test.py::test[join-mergejoin_with_different_key_names_nested-off-ForceBlocks] [GOOD] >> test.py::test[pg-select_from_columns_star-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_from_columns_star-default.txt-Results] >> test.py::test[join-mergejoin_with_different_key_names_nested-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names_norename--ForceBlocks] >> test.py::test[order_by-order_by_value_desc-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_value_desc-default.txt-Results] >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-15210_sqlin--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-yql-15210_sqlin--Results] [SKIPPED] >> test.py::test[optimizers-yql-3455_filter_sorted--ForceBlocks] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-7 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-46 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-7 >> test.py::test[key_filter-empty_range--Results] [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-61 >> test.py::test[key_filter-key_double_opt_suffix--Results] [SKIPPED] >> test.py::test[key_filter-lambda_with_null_filter--Results] >> test.py::test[pg-tpcds-q12-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q12-default.txt-Results] >> 
test.py::test[count-count--Results] [GOOD] >> test.py::test[count-count_no_grouping-default.txt-ForceBlocks] >> test.py::test[blocks-top_sort_two_asc--Results] [GOOD] >> test.py::test[column_group-hint_empty_grp_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_empty_grp_fail--Results] [SKIPPED] >> test.py::test[column_order-select_distinct_star-default.txt-ForceBlocks] >> test.py::test[window-current/session_extended--Results] [GOOD] >> test.py::test[window-full/aggregations--ForceBlocks] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-8 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-46 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-47 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-8 >> test.py::test[table_range-limit_with_table_path_over_sorted_range--Results] [GOOD] >> test.py::test[tpch-q15-default.txt-Results] >> test.py::test[join-mapjoin_early_rewrite_star-off-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_star-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_saves_output_sort_nested-off-ForceBlocks] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62 >> test.py::test[pg-select_from_columns_star-default.txt-Results] [GOOD] >> test.py::test[pg-select_subquery-default.txt-ForceBlocks] >> test.py::test[order_by-order_by_value_desc-default.txt-Results] [GOOD] >> test.py::test[pg-doubles_search_path-default.txt-ForceBlocks] >> test.py::test[flatten_by-flatten_few_fields--Results] [GOOD] >> test.py::test[blocks-combine_hashed_minmax_nested--Results] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by_expr--ForceBlocks] >> test.py::test[blocks-date_sub_interval--ForceBlocks] >> test.py::test[join-pullup_cross--Results] [GOOD] >> test.py::test[join-pullup_exclusion--ForceBlocks] >> test.py::test[select-autoextract_source_value-default.txt-Results] [GOOD] >> test.py::test[select-bit_ops-default.txt-Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-9 >> test.py::test[window-generic/aggregations_before_current--ForceBlocks] [GOOD] >> test.py::test[window-generic/aggregations_before_current--Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-9 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48 >> test.py::test[pg-tpcds-q12-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q23-default.txt-ForceBlocks] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-63 >> test.py::test[aggregate-group_by_ru_with_window_func--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_ru_with_window_func--Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-10 >> test.py::test[type_v3-insert_struct_v3_with_native--Results] [GOOD] >> 
test.py::test[type_v3-insert_struct_v3_wo_native--Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48 [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-10 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-63 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-64 >> test.py::test[distinct-distinct_columns_after_group-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_columns_after_group-default.txt-Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-65 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-11 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-12 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-66 >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-StrictAclCheck >> test.py::test[select-multi_source_issue-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-multi_source_issue-default.txt-Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-12 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-13 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-66 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-67 >> test.py::test[count-count_no_grouping-default.txt-ForceBlocks] [GOOD] >> test.py::test[count-count_no_grouping-default.txt-Results] >> test.py::test[optimizers-yql-3455_filter_sorted--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-3455_filter_sorted--Results] >> test.py::test[pg-select_subquery-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_subquery-default.txt-Results] >> test.py::test[column_order-select_distinct_star-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_distinct_star-default.txt-Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-14 >> TYardTest::TestLogOverwriteRestarts [GOOD] >> TYardTest::TestLogOwerwrite >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-13 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-67 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-68 >> test.py::test[window-generic/aggregations_before_current--Results] [GOOD] >> test.py::test[window-win_by_all_aggregate--ForceBlocks] >> TYardTest::TestLogOwerwrite [GOOD] >> 
test.py::test[aggr_factory-multi--Results] [GOOD] >> test.py::test[aggr_factory-top-default.txt-Results] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly >> test.py::test[join-mergejoin_with_different_key_names_norename--ForceBlocks] [GOOD] >> test.py::test[key_filter-lambda_with_null_filter--Results] [GOOD] >> test.py::test[key_filter-pushdown_keyextract_type_adjust-default.txt-Results] >> test.py::test[join-mergejoin_with_different_key_names_norename--Results] >> test_generator.py::TestTpchGenerator::test_s1_parts [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-68 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-69 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-15 >> TPDiskRaces::KillOwnerWhileDecommittingWithInflightMock [GOOD] >> TPDiskRaces::OwnerRecreationRaces >> test.py::test[aggregate-group_by_ru_with_window_func--Results] [GOOD] >> test.py::test[select-bit_ops-default.txt-Results] [GOOD] >> test.py::test[select-calculated_values-default.txt-Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-14 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> TYardTest::TestLogOwerwrite [GOOD] Test command err: 2025-07-08T11:54:29.269721Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:29.271495Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:299} Shutdown OwnerInfo# { PDisk system/log ChunkIds: {} Free ChunkIds: {} PDiskId# 1 2025-07-08T11:54:29.272089Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1 2025-07-08T11:54:29.310681Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:29.310701Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1 2025-07-08T11:54:29.310890Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 6213698586513314116 MagicLogChunk: 15888644149028467106 MagicDataChunk: 13117443384730957797 MagicSysLogChunk: 16872481131366055476 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975669289976 (2025-07-08T11:54:29.289976Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:29.312410Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:29.313199Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 
2025-07-08T11:54:29.313374Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:29.313682Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:29.368846Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 2054342 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-07-08T11:54:29.381840Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:29.382004Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 6213698586513314116 MagicLogChunk: 15888644149028467106 MagicDataChunk: 13117443384730957797 MagicSysLogChunk: 16872481131366055476 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975669289976 (2025-07-08T11:54:29.289976Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:29.389066Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 2069792 NonceLog# 2054342 NonceData# 1681614} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-07-08T11:54:29.389742Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:711} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2025-07-08T11:54:29.389771Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 1 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 4096} PDiskId# 1 2025-07-08T11:54:29.389786Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 4096} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:29.390004Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:29.481113Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1817} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2025-07-08T11:54:29.491521Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:29.491711Z 
:BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 6213698586513314116 MagicLogChunk: 15888644149028467106 MagicDataChunk: 13117443384730957797 MagicSysLogChunk: 16872481131366055476 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975669289976 (2025-07-08T11:54:29.289976Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:29.492659Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 3800155 NonceLog# 3798563 NonceData# 3723348} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-07-08T11:54:29.494270Z :BS_PDISK WARN: {LR016@blobstorage_pdisk_logreader.cpp:711} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 316 nonce jump2 IsEndOfSplice# false " replacing ChunkInfo->DesiredPrevChunkLastNonce# "# 0 " with nonceJumpLogPageHeader2->PreviousNonce# "# 0 PDiskId# 1 2025-07-08T11:54:29.494293Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 2 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 8192} PDiskId# 1 2025-07-08T11:54:29.494307Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 8192} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:29.494653Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:29.593238Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1817} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2025-07-08T11:54:29.611819Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:29.612178Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl.cpp:299} Shutdown OwnerInfo# { PDisk system/log ChunkIds: {} Free ChunkIds: {} PDiskId# 1 2025-07-08T11:54:29.613140Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1 2025-07-08T11:54:29.646456Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:29.646486Z :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1 2025-07-08T11:54:29.646627Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 3515842838886827584 MagicLogChunk: 13792557234061457676 MagicDataChunk: 6401517732457125017 MagicSysLogChunk: 17196684568211561434 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 
SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975669630282 (2025-07-08T11:54:29.630282Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:29.647885Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:29.648817Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:29.648833Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:29.649037Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:29.711011Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1274227 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-07-08T11:54:29.723569Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:29.724847Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 0 MagicNextLogChunkReference: 3515842838886827584 MagicLogChunk: 13792557234061457676 MagicDataChunk: 6401517732457125017 MagicSysLogChunk: 17196684568211561434 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975669630282 (2025-07-08T11:54:29.630282Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:29.733492Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1994141 NonceLog# 1274227 NonceData# 1583378} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-07-08T11:54:29.734283Z ... 
880000 bytes (1 GB) Guid: 7350009799266665404 MagicNextLogChunkReference: 6145614263499812189 MagicLogChunk: 9240098977361939542 MagicDataChunk: 8614015039946168670 MagicSysLogChunk: 6700337242742007292 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975798839487 (2025-07-08T11:56:38.839487Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:56:49.002297Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 116413849 NonceLog# 115787492 NonceData# 116945749} LogHeadChunkIdx# 26 LogHeadChunkPreviousNonce# 1861694 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-07-08T11:56:49.013281Z :BS_PDISK WARN: {BPD01@blobstorage_pdisk_sectorrestorator.cpp:86} Sector nonce reordering OwnerId# 0 IsErasureEncode# false ErasureDataParts# 4 Sector# 0 ReadNonce# 1849023 LastNonce# 115787496 MaxNonce# 0 sectorOffset# 2416640 PDiskId# 1 2025-07-08T11:56:49.013302Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 78 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 319488} PDiskId# 1 2025-07-08T11:56:49.013320Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 319488} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:56:49.017490Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:56:49.101189Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1817} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2025-07-08T11:56:49.153211Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:56:49.181058Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 7350009799266665404 MagicNextLogChunkReference: 6145614263499812189 MagicLogChunk: 9240098977361939542 MagicDataChunk: 8614015039946168670 MagicSysLogChunk: 6700337242742007292 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975798839487 (2025-07-08T11:56:38.839487Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:56:49.190494Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 117820405 NonceLog# 117161935 NonceData# 118127528} LogHeadChunkIdx# 26 LogHeadChunkPreviousNonce# 1861694 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-07-08T11:56:49.221063Z :BS_PDISK WARN: 
{BPD01@blobstorage_pdisk_sectorrestorator.cpp:86} Sector nonce reordering OwnerId# 0 IsErasureEncode# false ErasureDataParts# 4 Sector# 0 ReadNonce# 1849029 LastNonce# 117161939 MaxNonce# 0 sectorOffset# 2441216 PDiskId# 1 2025-07-08T11:56:49.221091Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 84 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 344064} PDiskId# 1 2025-07-08T11:56:49.221113Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 344064} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:56:49.227759Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:56:49.265379Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1817} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2025-07-08T11:56:49.363488Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:56:49.381100Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 7350009799266665404 MagicNextLogChunkReference: 6145614263499812189 MagicLogChunk: 9240098977361939542 MagicDataChunk: 8614015039946168670 MagicSysLogChunk: 6700337242742007292 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975798839487 (2025-07-08T11:56:38.839487Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:56:49.393070Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 118913084 NonceLog# 118953776 NonceData# 119716085} LogHeadChunkIdx# 26 LogHeadChunkPreviousNonce# 1861694 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-07-08T11:56:49.417624Z :BS_PDISK WARN: {BPD01@blobstorage_pdisk_sectorrestorator.cpp:86} Sector nonce reordering OwnerId# 0 IsErasureEncode# false ErasureDataParts# 4 Sector# 0 ReadNonce# 1849035 LastNonce# 118953780 MaxNonce# 0 sectorOffset# 2465792 PDiskId# 1 2025-07-08T11:56:49.417649Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 90 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 368640} PDiskId# 1 2025-07-08T11:56:49.417669Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 368640} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack 
Results.size# 0} PDiskId# 1 2025-07-08T11:56:49.429865Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:56:49.461365Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1817} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 2025-07-08T11:56:49.746506Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:56:49.757521Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 11496825797442922489 MagicNextLogChunkReference: 12805918896372668709 MagicLogChunk: 18409134798324672645 MagicDataChunk: 11176362041517906764 MagicSysLogChunk: 11063125106668117710 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975809679490 (2025-07-08T11:56:49.679490Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:56:49.765188Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:56:49.777041Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:56:49.777088Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:56:49.777545Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:56:49.857186Z :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1073312 CutLogId# [0:0:0] ownerRound# 2 PDiskId# 1 2025-07-08T11:56:49.970534Z :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:56:49.985080Z :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 1658880000 bytes (1 GB) Guid: 11496825797442922489 MagicNextLogChunkReference: 12805918896372668709 MagicLogChunk: 18409134798324672645 MagicDataChunk: 11176362041517906764 MagicSysLogChunk: 11063125106668117710 MagicFormatChunk: 17332287817462050952 ChunkSize: 2097152 bytes (2 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975809679490 (2025-07-08T11:56:49.679490Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:56:49.995112Z :BS_PDISK NOTICE: 
{BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 7 NonceSet# {TNonceSet Version# 0 NonceSysLog# 1980686 NonceLog# 1076917 NonceData# 1665411} LogHeadChunkIdx# 6 LogHeadChunkPreviousNonce# 1075860 Owner[3]# [0:4294967295:0:0:0]} PDiskId# 1 2025-07-08T11:56:50.022847Z :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 8 SectorIdx# 36 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 8 OffsetInChunk# 147456} PDiskId# 1 2025-07-08T11:56:50.022884Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 8 OffsetInChunk# 147456} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:56:50.033876Z :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:56:50.093173Z :BS_PDISK NOTICE: {BPD30@blobstorage_pdisk_impl.cpp:1817} Registered known VDisk VDisk# [0:4294967295:0:0:0] OwnerId# 3 OwnerRound# 2 PDiskId# 1 >> test.py::test[distinct-distinct_columns_after_group-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_session_only_distinct--ForceBlocks] >> test.py::test[distinct-distinct_count_and_full_count-default.txt-ForceBlocks] >> test.py::test[select-multi_source_issue-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_2-default.txt-ForceBlocks] >> test.py::test[pg-doubles_search_path-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-doubles_search_path-default.txt-Results] |61.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpchGenerator::test_s1_parts [GOOD] >> test.py::test[count-count_no_grouping-default.txt-Results] [GOOD] >> test.py::test[count-count_nullable_sub-default.txt-ForceBlocks] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-69 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-70 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-16 >> test.py::test[pg-select_subquery-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q43-default.txt-ForceBlocks] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-15 >> test.py::test[blocks-date_sub_interval--ForceBlocks] [GOOD] >> test.py::test[blocks-date_sub_interval--Results] >> test.py::test[join-pullup_exclusion--ForceBlocks] [GOOD] >> test.py::test[blocks-date_greater_or_equal_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-date_greater_or_equal_scalar--Results] >> test.py::test[join-pullup_exclusion--Results] >> test.py::test[join-mergejoin_saves_output_sort_nested-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_nested-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_small_primary--ForceBlocks] >> test.py::test[flatten_by-flatten_with_group_by_expr--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_with_group_by_expr--Results] >> 
test.py::test[type_v3-insert_struct_v3_wo_native--Results] [GOOD] >> test.py::test[udf-two_regexps--Results] >> test.py::test[column_order-select_distinct_star-default.txt-Results] [GOOD] >> test.py::test[dq-read_cost_native-default.txt-ForceBlocks] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-70 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71 >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly-StrictAclCheck >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-17 >> test.py::test[window-full/aggregations--ForceBlocks] [GOOD] >> test.py::test[window-full/aggregations--Results] >> test.py::test[optimizers-yql-3455_filter_sorted--Results] [GOOD] >> test.py::test[optimizers-yql-7767_key_filter_with_view--ForceBlocks] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-16 >> test.py::test[tpch-q15-default.txt-Results] [GOOD] >> test.py::test[udf-python_script_from_file--Results] [SKIPPED] >> test.py::test[udf-udaf_lambda-default.txt-Results] >> KqpUniqueIndex::InsertNullInFk >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18 >> KqpIndexes::MultipleSecondaryIndex+UseSink >> KqpIndexes::CheckUpsertNonEquatableType+NotNull >> TSequenceReboots::CopyTableWithSequence >> test.py::test[pg-doubles_search_path-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q32-default.txt-ForceBlocks] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-17 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-19 >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant >> TColumnShardTestSchema::RebootHotTiersTtl [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 [GOOD] >> test.py::test[pg-tpcds-q14-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q15-default.txt-Results] >> test.py::test[join-mergejoin_with_different_key_names_norename--Results] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_norename-off-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=6442960;columns=10; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=151976376.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=151976376.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=151976376.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=151976376.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=151976376.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=131976376.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=151976376.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=151976376.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=131975176.000000s;Name=;Codec=}; 2025-07-08T11:56:16.980276Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:56:16.986545Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:56:16.986600Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:56:16.990123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:56:16.990169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:56:16.990209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:56:16.990224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:56:16.990237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:56:16.990254Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:56:16.990267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:56:16.990281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:56:16.990295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:56:16.990309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:56:16.990322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:56:16.990339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:56:16.999752Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:56:16.999830Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:56:16.999838Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:56:16.999865Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:56:17.001633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:56:17.001647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:56:17.001651Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:56:17.001660Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:56:17.001666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:56:17.001672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:56:17.001676Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:56:17.001690Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:56:17.001695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:56:17.001701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:56:17.001704Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:56:17.001711Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:56:17.001716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:56:17.001722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:56:17.001725Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:56:17.001731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:56:17.001737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:56:17.001740Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:56:17.001757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:56:17.001762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:56:17.001765Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:56:17.001779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:56:17.001785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:56:17.001788Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:56:17.001798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:56:17.001803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:56:17.001806Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:56:17.001812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:56:17.001817Z node 1 :TX_COLUMNSHARD WARN: tablet ... ne=constructor_meta.cpp:65;memory_size=206;data_size=180;sum=10014;count=98;size_of_meta=112; 2025-07-08T11:56:56.145412Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=252;sum=13542;count=49;size_of_portion=184; 2025-07-08T11:56:56.145432Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=496; 2025-07-08T11:56:56.145437Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=0; 2025-07-08T11:56:56.145458Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=15; 2025-07-08T11:56:56.145462Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=540; 2025-07-08T11:56:56.145465Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=548; 2025-07-08T11:56:56.145469Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=0; 2025-07-08T11:56:56.145479Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=5; 2025-07-08T11:56:56.145483Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=641; 2025-07-08T11:56:56.145496Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=8; 2025-07-08T11:56:56.145507Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=7; 2025-07-08T11:56:56.145522Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=11; 2025-07-08T11:56:56.145532Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=5; 2025-07-08T11:56:56.146221Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=683; 2025-07-08T11:56:56.146877Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=637; 2025-07-08T11:56:56.146895Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=1; 2025-07-08T11:56:56.146902Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2025-07-08T11:56:56.146907Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-07-08T11:56:56.146919Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=6; 2025-07-08T11:56:56.146924Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=0; 2025-07-08T11:56:56.146936Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=7; 2025-07-08T11:56:56.146941Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-07-08T11:56:56.146948Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=3; 2025-07-08T11:56:56.146956Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=5; 2025-07-08T11:56:56.146991Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=30; 2025-07-08T11:56:56.146995Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=2993; 2025-07-08T11:56:56.147015Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=24365192;raw_bytes=35131129;count=5;records=400000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-07-08T11:56:56.147032Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1745:3646];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SwitchToWork; 2025-07-08T11:56:56.147038Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1745:3646];process=SwitchToWork;fline=columnshard.cpp:76;event=initialize_shard;step=SignalTabletActive; 2025-07-08T11:56:56.147047Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1745:3646];process=SwitchToWork;fline=columnshard_impl.cpp:1327;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-07-08T11:56:56.148856Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1745:3646];process=SwitchToWork;fline=column_engine_logs.cpp:471;event=OnTieringModified;new_count_tierings=1; 2025-07-08T11:56:56.148879Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:56:56.148897Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=4; 2025-07-08T11:56:56.148908Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975514675;tx_id=18446744073709551615;;current_snapshot_ts=1751975778266; 2025-07-08T11:56:56.148915Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:56:56.148923Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:56:56.148927Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:56:56.148942Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; 2025-07-08T11:56:56.149460Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1745:3646];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:248;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-07-08T11:56:56.149694Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1745:3646];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:237;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 
2025-07-08T11:56:56.149700Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-07-08T11:56:56.149703Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-07-08T11:56:56.149708Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1745:3646];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:56:56.149722Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1745:3646];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=4; 2025-07-08T11:56:56.149730Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1745:3646];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975514675;tx_id=18446744073709551615;;current_snapshot_ts=1751975778266; 2025-07-08T11:56:56.149736Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1745:3646];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:56:56.149743Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1745:3646];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:56:56.149746Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1745:3646];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:56:56.149759Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1745:3646];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/14617704 160000/9752224 80000/4886744 0/0 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-18 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 8266, msgbus: 28782 2025-07-08T11:56:27.349941Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524678709730395880:2188];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:56:27.350089Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001c2a/r3tmp/tmp6pcQs8/pdisk_1.dat 2025-07-08T11:56:27.413907Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8266, node 1 2025-07-08T11:56:27.431124Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:56:27.431137Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:56:27.431139Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:56:27.431176Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T11:56:27.450772Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:56:27.451022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:28782 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-07-08T11:56:27.452815Z node 1 :TX_PROXY DEBUG: actor# [1:7524678709730395947:2138] Handle TEvNavigate describe path dc-1 2025-07-08T11:56:27.452842Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678709730396415:2436] HANDLE EvNavigateScheme dc-1 2025-07-08T11:56:27.453248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:56:27.453534Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678709730396415:2436] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-07-08T11:56:27.463025Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678709730396415:2436] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-07-08T11:56:27.465076Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678709730396415:2436] Handle TEvDescribeSchemeResult Forward to# [1:7524678709730396413:2434] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 
1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-07-08T11:56:27.467712Z node 1 :TX_PROXY DEBUG: actor# [1:7524678709730395947:2138] Handle TEvProposeTransaction 2025-07-08T11:56:27.467726Z node 1 :TX_PROXY DEBUG: actor# [1:7524678709730395947:2138] Cookie# 0 userReqId# "" DELAY REQUEST, wait txids from allocator Type# Scheme 2025-07-08T11:56:27.482678Z node 1 :TX_PROXY DEBUG: actor# [1:7524678709730395947:2138] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-07-08T11:56:27.483308Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-07-08T11:56:27.483318Z node 1 :TX_PROXY DEBUG: actor# [1:7524678709730395947:2138] TxId# 281474976715657 ProcessProposeTransaction 2025-07-08T11:56:27.483350Z node 1 :TX_PROXY DEBUG: actor# [1:7524678709730395947:2138] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7524678709730396434:2448] 2025-07-08T11:56:27.491263Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678709730396434:2448] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-07-08T11:56:27.491295Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678709730396434:2448] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-07-08T11:56:27.491299Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678709730396434:2448] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-07-08T11:56:27.491312Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678709730396434:2448] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-07-08T11:56:27.491407Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678709730396434:2448] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-07-08T11:56:27.491433Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678709730396434:2448] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-07-08T11:56:27.491445Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678709730396434:2448] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-07-08T11:56:27.491505Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678709730396434:2448] txid# 281474976715657 HANDLE 
EvClientConnected 2025-07-08T11:56:27.491675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:56:27.492326Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678709730396434:2448] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-07-08T11:56:27.492339Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678709730396434:2448] txid# 281474976715657 SEND to# [1:7524678709730396429:2443] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} waiting... 2025-07-08T11:56:27.499450Z node 1 :TX_PROXY DEBUG: actor# [1:7524678709730395947:2138] Handle TEvProposeTransaction 2025-07-08T11:56:27.499472Z node 1 :TX_PROXY DEBUG: actor# [1:7524678709730395947:2138] TxId# 281474976715658 ProcessProposeTransaction 2025-07-08T11:56:27.499482Z node 1 :TX_PROXY DEBUG: actor# [1:7524678709730395947:2138] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7524678709730396474:2484] 2025-07-08T11:56:27.500098Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678709730396474:2484] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-07-08T11:56:27.500111Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678709730396474:2484] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-07-08T11:56:27.500113Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678709730396474:2484] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-07-08T11:56:27.500125Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678709730396474:2484] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-07-08T11:56:27.500193Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678709730396474:2484] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-07-08T11:56:27.500209Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678709730396474:2484] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-07-08T11:56:27.500218Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678709730396474:2484] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-07-08T11:56:27.500255Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678709730396474:2484] txid# 281474976715658 HANDLE EvClientConnected 2025-07-08T11:56:27.500339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T11:56:27.500938Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678709730396474:2484] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2025-07-08T11:56:27.500965Z node 1 :TX_PROXY DEBUG: Actor# 
[1:7524678709730396474:2484] txid# 281474976715658 SEND to# [1:7524678709730396473:2483] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2025-07-08T11:56:27.511279Z node 1 :TX_PROXY DEBUG: actor# [1:7524678709730395947:2138] Handle TEvProposeTransaction 2025-07-08T11:56:27.511295Z node 1 :TX_PROXY DEBUG: actor# [1:7524678709730395947:2138] TxId# 281474976715659 ProcessProposeTransaction 2025-07-08T11:56:27.511313Z node 1 :TX_PROXY DEBUG: actor# [1:7524678709730395947:2138] Cookie# 0 userReqId# "" txid# 281474976715659 SEND to# [1:7524678709730396492:2494] 2025-07-08T11:56:27.511873Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678709730396492:2494] txid# 281474976715659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModi ... 4976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-07-08T11:56:55.607637Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189049:2474] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-07-08T11:56:55.607640Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189049:2474] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-07-08T11:56:55.607657Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189049:2474] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-07-08T11:56:55.607738Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189049:2474] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-07-08T11:56:55.607759Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189049:2474] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-07-08T11:56:55.607770Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189049:2474] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-07-08T11:56:55.607800Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189049:2474] txid# 281474976715658 HANDLE EvClientConnected 2025-07-08T11:56:55.607911Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T11:56:55.608494Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189049:2474] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2025-07-08T11:56:55.608501Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189049:2474] txid# 281474976715658 SEND to# [59:7524678830922189048:2473] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2025-07-08T11:56:55.629060Z node 59 :TX_PROXY DEBUG: actor# [59:7524678826627221164:2108] Handle TEvProposeTransaction 2025-07-08T11:56:55.629080Z node 59 :TX_PROXY DEBUG: actor# [59:7524678826627221164:2108] TxId# 281474976715659 
ProcessProposeTransaction 2025-07-08T11:56:55.629099Z node 59 :TX_PROXY DEBUG: actor# [59:7524678826627221164:2108] Cookie# 0 userReqId# "" txid# 281474976715659 SEND to# [59:7524678830922189067:2484] 2025-07-08T11:56:55.629929Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189067:2484] txid# 281474976715659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\001\022\026\032\024ordinaryuser@builtin\n\"\010\000\022\036\010\001\020\200\200\002\032\024ordinaryuser@builtin \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:37146" 2025-07-08T11:56:55.629944Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189067:2484] txid# 281474976715659 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-07-08T11:56:55.629948Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189067:2484] txid# 281474976715659 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-07-08T11:56:55.629961Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189067:2484] txid# 281474976715659 TEvNavigateKeySet requested from SchemeCache 2025-07-08T11:56:55.630050Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189067:2484] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-07-08T11:56:55.630072Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189067:2484] HANDLE EvNavigateKeySetResult, txid# 281474976715659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-07-08T11:56:55.630082Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189067:2484] txid# 281474976715659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715659 TabletId# 72057594046644480} 2025-07-08T11:56:55.630120Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189067:2484] txid# 281474976715659 HANDLE EvClientConnected 2025-07-08T11:56:55.630235Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T11:56:55.641414Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189067:2484] txid# 281474976715659 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715659} 2025-07-08T11:56:55.641435Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189067:2484] txid# 281474976715659 SEND to# [59:7524678830922189066:2286] Source {TEvProposeTransactionStatus txid# 281474976715659 Status# 48} 2025-07-08T11:56:55.667743Z node 59 :TX_PROXY DEBUG: actor# [59:7524678826627221164:2108] Handle TEvProposeTransaction 2025-07-08T11:56:55.667758Z node 59 :TX_PROXY DEBUG: actor# [59:7524678826627221164:2108] TxId# 281474976715660 ProcessProposeTransaction 2025-07-08T11:56:55.667773Z node 59 :TX_PROXY DEBUG: actor# [59:7524678826627221164:2108] Cookie# 0 userReqId# "" txid# 281474976715660 SEND to# [59:7524678830922189089:2495] 2025-07-08T11:56:55.668560Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189089:2495] txid# 281474976715660 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" 
OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:37146" 2025-07-08T11:56:55.668571Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189089:2495] txid# 281474976715660 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-07-08T11:56:55.668574Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189089:2495] txid# 281474976715660 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-07-08T11:56:55.668585Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189089:2495] txid# 281474976715660 TEvNavigateKeySet requested from SchemeCache 2025-07-08T11:56:55.668664Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189089:2495] txid# 281474976715660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-07-08T11:56:55.668683Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189089:2495] HANDLE EvNavigateKeySetResult, txid# 281474976715660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-07-08T11:56:55.668692Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189089:2495] txid# 281474976715660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715660 TabletId# 72057594046644480} 2025-07-08T11:56:55.668734Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189089:2495] txid# 281474976715660 HANDLE EvClientConnected 2025-07-08T11:56:55.672011Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189089:2495] txid# 281474976715660 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715660} 2025-07-08T11:56:55.672024Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189089:2495] txid# 281474976715660 SEND to# [59:7524678830922189088:2288] Source {TEvProposeTransactionStatus txid# 281474976715660 Status# 48} 2025-07-08T11:56:55.690729Z node 59 :TX_PROXY DEBUG: actor# [59:7524678826627221164:2108] Handle TEvProposeTransaction 2025-07-08T11:56:55.690742Z node 59 :TX_PROXY DEBUG: actor# [59:7524678826627221164:2108] TxId# 281474976715661 ProcessProposeTransaction 2025-07-08T11:56:55.690754Z node 59 :TX_PROXY DEBUG: actor# [59:7524678826627221164:2108] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [59:7524678830922189116:2507] 2025-07-08T11:56:55.691602Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189116:2507] txid# 281474976715661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\024ordinaryuser@builtin\022\030\022\026\n\024all-users@well-known\032\024ordinaryuser@builtin\"\007Builtin*\027ordi****ltin (32520BBF)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:37146" 2025-07-08T11:56:55.691615Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189116:2507] txid# 281474976715661 Bootstrap, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-07-08T11:56:55.691618Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189116:2507] txid# 281474976715661 Bootstrap, UserSID: 
ordinaryuser@builtin IsClusterAdministrator: 0 2025-07-08T11:56:55.691654Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189116:2507] txid# 281474976715661 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-07-08T11:56:55.691660Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189116:2507] txid# 281474976715661 HandleResolveDatabase, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-07-08T11:56:55.691668Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189116:2507] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-07-08T11:56:55.691716Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189116:2507] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-07-08T11:56:55.691720Z node 59 :TX_PROXY ERROR: Actor# [59:7524678830922189116:2507] txid# 281474976715661, Access denied for ordinaryuser@builtin, attempt to manage user 2025-07-08T11:56:55.691736Z node 59 :TX_PROXY ERROR: Actor# [59:7524678830922189116:2507] txid# 281474976715661, issues: { message: "Access denied for ordinaryuser@builtin" issue_code: 200000 severity: 1 } 2025-07-08T11:56:55.691740Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678830922189116:2507] txid# 281474976715661 SEND to# [59:7524678830922189115:2296] Source {TEvProposeTransactionStatus Status# 5} 2025-07-08T11:56:55.691809Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=MTRhMDFmNGYtZjU3NWVjZDktYzA3YjFhOC02NzY1MmM2ZA==, ActorId: [59:7524678830922189106:2296], ActorState: ExecuteState, TraceId: 01jzmyatg7fpv5k0f5d5jjkw2v, Create QueryResponse for error on request, msg: 2025-07-08T11:56:55.691903Z node 59 :TX_PROXY DEBUG: actor# [59:7524678826627221164:2108] Handle TEvExecuteKqpTransaction 2025-07-08T11:56:55.691906Z node 59 :TX_PROXY DEBUG: actor# [59:7524678826627221164:2108] TxId# 281474976715662 ProcessProposeKqpTransaction 2025-07-08T11:56:55.905234Z node 59 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> test.py::test[in-in_ansi_join--Results] [GOOD] >> KqpIndexes::MultipleSecondaryIndex+UseSink [GOOD] >> KqpIndexes::MultipleSecondaryIndex-UseSink >> test.py::test[insert-append_sorted-to_sorted_calc-ForceBlocks] >> KqpIndexes::CheckUpsertNonEquatableType+NotNull [GOOD] >> KqpIndexes::CheckUpsertNonEquatableType-NotNull >> test.py::test[blocks-date_sub_interval--Results] [GOOD] >> test.py::test[blocks-date_sub_interval_scalar--ForceBlocks] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck >> test.py::test[join-pullup_exclusion--Results] [GOOD] >> test.py::test[join-pullup_inner-off-ForceBlocks] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-19 >> test.py::test[key_filter-pushdown_keyextract_type_adjust-default.txt-Results] [GOOD] >> test.py::test[key_filter-uuid--Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-21 >> KqpUniqueIndex::InsertNullInFk [GOOD] >> KqpUniqueIndex::InsertNullInComplexFk >> test.py::test[select-calculated_values-default.txt-Results] [GOOD] >> test.py::test[select-dict_lookup_by_key-default.txt-Results] >> 
test.py::test[pg-tpcds-q23-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q23-default.txt-Results] >> test.py::test[aggregate-group_by_session_only_distinct--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_session_only_distinct--Results] >> test.py::test[flatten_by-flatten_with_group_by_expr--Results] [GOOD] >> test.py::test[flatten_by-flatten_with_join--ForceBlocks] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22 >> KqpIndexes::CheckUpsertNonEquatableType-NotNull [GOOD] >> KqpIndexes::CreateTableWithExplicitAsyncIndexSQL >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-20 >> KqpIndexes::MultipleSecondaryIndex-UseSink [GOOD] >> KqpIndexes::MultipleModifications >> KqpRanges::NullInKey >> KqpUniqueIndex::InsertNullInComplexFk [GOOD] >> KqpUniqueIndex::InsertNullInComplexFkDuplicate >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23 >> test.py::test[pg-tpcds-q43-default.txt-ForceBlocks] [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeView >> test.py::test[pg-tpcds-q43-default.txt-Results] >> test.py::test[dq-read_cost_native-default.txt-ForceBlocks] [GOOD] >> test.py::test[dq-read_cost_native-default.txt-Results] [SKIPPED] >> test.py::test[epochs-use_sorted_by_complex_type--ForceBlocks] |61.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21 |61.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |61.9%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut >> KqpIndexes::MultipleModifications [GOOD] >> test.py::test[distinct-distinct_count_and_full_count-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_count_and_full_count-default.txt-Results] >> test.py::test[join-mergejoin_small_primary--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_small_primary--Results] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_2-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-top-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_udf_nested--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::MultipleModifications [GOOD] Test command err: Trying to start YDB, gRPC: 22424, MsgBus: 15599 2025-07-08T11:56:55.476453Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524678832506248272:2215];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:56:55.476479Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/002079/r3tmp/tmpfx0V3h/pdisk_1.dat 2025-07-08T11:56:55.899334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:56:55.899360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:56:55.900967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22424, node 1 2025-07-08T11:56:55.945485Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:56:56.057290Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:56:56.057303Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:56:56.057305Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:56:56.057344Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15599 TClient is connected to server localhost:15599 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:56:56.401782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:56:56.404647Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-07-08T11:56:56.419965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:56:56.462308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:56:56.494008Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:56:56.514329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:56:56.549811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:56:56.690791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T11:56:56.712547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-07-08T11:56:56.736555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-07-08T11:56:56.757072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-07-08T11:56:56.781429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-07-08T11:56:56.801104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-07-08T11:56:56.814336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-07-08T11:56:56.990406Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7524678836801217927:3582], Recipient [1:7524678832506248533:2209]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T11:56:56.990426Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T11:56:56.990429Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-07-08T11:56:56.990437Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7524678836801217923:3579], Recipient [1:7524678832506248533:2209]: {TEvModifySchemeTransaction txid# 281474976710670 TabletId# 72057594046644480} 2025-07-08T11:56:56.990438Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-07-08T11:56:57.015312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "TestTable" Columns { Name: "Key" Type: "String" NotNull: false } Columns { Name: "Value1" Type: "String" NotNull: false } Columns { Name: "Value2" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } Temporary: false } IndexDescription { Name: "Index1" KeyColumnNames: "Value1" Type: EIndexTypeGlobal IndexImplTableDescriptions { PartitionConfig { } } } IndexDescription { Name: "Index2" KeyColumnNames: "Value2" Type: EIndexTypeGlobal IndexImplTableDescriptions { PartitionConfig { } } } } } TxId: 281474976710670 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:60620" , at schemeshard: 72057594046644480 2025-07-08T11:56:57.015397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTableIndex construct operation table path: 
/Root/TestTable domain path id: [OwnerId: 72057594046644480, LocalPathId: 1] domain path: /Root shardsToCreate: 3 GetShardsInside: 34 MaxShards: 200000 2025-07-08T11:56:57.015462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/TestTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-07-08T11:56:57.015484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /Root/TestTable, opId: 281474976710670:0, schema: Name: "TestTable" Columns { Name: "Key" Type: "String" NotNull: false } Columns { Name: "Value1" Type: "String" NotNull: false } Columns { Name: "Value2" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } Temporary: false, at schemeshard: 72057594046644480 2025-07-08T11:56:57.015545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: TestTable, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-07-08T11:56:57.015561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-07-08T11:56:57.015566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-07-08T11:56:57.015590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 2 2025-07-08T11:56:57.015597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710670:0 1 -> 2 2025-07-08T11:56:57.015763Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TCreateTable Propose creating new table opId# 281474976710670:0 path# /Root/TestTable pathId# [OwnerId: 72057594046644480, LocalPathId: 13] schemeshard# 72057594046644480 tx# WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "TestTable" Columns { Name: "Key" Type: "String" NotNull: false } Columns { Name: "Value1" Type: "String" NotNull: false } Columns { Name: "Value2" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } Temporary: false } Internal: false FailOnExist: false AllowCreateInTempDir: false 2025-07-08T11:56:57.015777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710670:5, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-07-08T11:56:57.015780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-07-08T11:56:57.015793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTableIndex Propose, path: /Root/TestTable/Index1, operationId: 281474976710670:1, transaction: WorkingDir: "/Root/TestTable" OperationType: ESchemeOpCreateTableIndex CreateTableIndex { Name: "Index1" KeyColumnNames: "Value1" Type: EIndexTypeGlobal IndexImplTableDescriptions { PartitionConfig { } } } Internal: false FailOnExist: false AllowCreateInTempDir: false, at schemeshard: 72057594046644480 2025-07-08T11:56:57.015818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 13], parent name: TestTable, child name: Index1, child id: [OwnerId: 72057594046644480, LocalPathId: 14], at schemeshard: 72057594046644480 2025-07-08T11:56:57.015827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 14] was 0 2025-07-08T11:56:57.015829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ... 2057594046644480:35, shard: 72075186224037923, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-07-08T11:57:01.598866Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T11:57:01.598868Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715670:0, datashard: 72075186224037923, at schemeshard: 72057594046644480 2025-07-08T11:57:01.598871Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715670:0 129 -> 240 2025-07-08T11:57:01.598884Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-07-08T11:57:01.598905Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [3:7524678856897002590:2449], Recipient [3:7524678852602033091:2146]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 7524678856897002590 RawX2: 4503612512274833 } Origin: 72075186224037922 State: 2 TxId: 281474976715670 Step: 0 Generation: 1 2025-07-08T11:57:01.598906Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-07-08T11:57:01.598910Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 7524678856897002590 RawX2: 4503612512274833 } Origin: 72075186224037922 State: 2 TxId: 281474976715670 Step: 0 Generation: 1 2025-07-08T11:57:01.598911Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715670, tablet: 72075186224037922, partId: 2 2025-07-08T11:57:01.598917Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715670:2, at schemeshard: 72057594046644480, message: Source { RawX1: 7524678856897002590 RawX2: 4503612512274833 } Origin: 72075186224037922 State: 2 TxId: 281474976715670 Step: 0 Generation: 1 2025-07-08T11:57:01.598919Z node 3 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715670:2 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046644480 2025-07-08T11:57:01.598923Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715670:2 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 7524678856897002590 RawX2: 4503612512274833 } Origin: 72075186224037922 State: 2 TxId: 281474976715670 Step: 0 Generation: 1 2025-07-08T11:57:01.598926Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715670:2, shardIdx: 72057594046644480:36, shard: 72075186224037922, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-07-08T11:57:01.598927Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715670:2, at schemeshard: 72057594046644480 
2025-07-08T11:57:01.598928Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715670:2, datashard: 72075186224037922, at schemeshard: 72057594046644480 2025-07-08T11:57:01.598930Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715670:2 129 -> 240 2025-07-08T11:57:01.598936Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-07-08T11:57:01.598973Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T11:57:01.598974Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-07-08T11:57:01.598975Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715670:0 2025-07-08T11:57:01.598981Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:7524678856897002596:2450] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715670 at schemeshard: 72057594046644480 2025-07-08T11:57:01.598992Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715670:2, at schemeshard: 72057594046644480 2025-07-08T11:57:01.598992Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-07-08T11:57:01.598993Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715670:2 2025-07-08T11:57:01.598996Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:7524678856897002590:2449] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715670 at schemeshard: 72057594046644480 2025-07-08T11:57:01.599002Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [3:7524678852602033091:2146], Recipient [3:7524678852602033091:2146]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-07-08T11:57:01.599003Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-07-08T11:57:01.599006Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T11:57:01.599008Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715670:0 ProgressState 2025-07-08T11:57:01.599012Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-07-08T11:57:01.599014Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715670:0 progress is 2/3 2025-07-08T11:57:01.599015Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715670 ready parts: 2/3 2025-07-08T11:57:01.599017Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715670:0 progress is 2/3 2025-07-08T11:57:01.599018Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715670 ready parts: 2/3 2025-07-08T11:57:01.599020Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715670, ready parts: 2/3, is published: true 2025-07-08T11:57:01.599030Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [3:7524678852602033091:2146], Recipient [3:7524678852602033091:2146]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-07-08T11:57:01.599031Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-07-08T11:57:01.599033Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715670:2, at schemeshard: 
72057594046644480 2025-07-08T11:57:01.599034Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715670:2 ProgressState 2025-07-08T11:57:01.599036Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-07-08T11:57:01.599037Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715670:2 progress is 3/3 2025-07-08T11:57:01.599039Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715670 ready parts: 3/3 2025-07-08T11:57:01.599040Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715670:2 progress is 3/3 2025-07-08T11:57:01.599050Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715670 ready parts: 3/3 2025-07-08T11:57:01.599051Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715670, ready parts: 3/3, is published: true 2025-07-08T11:57:01.599057Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:7524678856897002553:2447] message: TxId: 281474976715670 2025-07-08T11:57:01.599059Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715670 ready parts: 3/3 2025-07-08T11:57:01.599062Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715670:0 2025-07-08T11:57:01.599064Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715670:0 2025-07-08T11:57:01.599083Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 4 2025-07-08T11:57:01.599085Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715670:1 2025-07-08T11:57:01.599086Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715670:1 2025-07-08T11:57:01.599088Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 14] was 3 2025-07-08T11:57:01.599090Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715670:2 2025-07-08T11:57:01.599090Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715670:2 2025-07-08T11:57:01.599095Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 15] was 3 2025-07-08T11:57:01.599166Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-07-08T11:57:01.599170Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-07-08T11:57:01.599179Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:7524678856897002553:2447] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715670 at schemeshard: 72057594046644480 2025-07-08T11:57:01.599651Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [3:7524678856897002568:3594], Recipient [3:7524678852602033091:2146]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-07-08T11:57:01.599656Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-07-08T11:57:01.599658Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-07-08T11:57:01.605222Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender 
[3:7524678856897002666:3665], Recipient [3:7524678852602033091:2146]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-07-08T11:57:01.605231Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-07-08T11:57:01.605233Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-07-08T11:57:01.605236Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [3:7524678856897002665:3664], Recipient [3:7524678852602033091:2146]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-07-08T11:57:01.605237Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-07-08T11:57:01.605238Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-07-08T11:57:01.813138Z node 3 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:57:01.834038Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:7524678852602033091:2146]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-07-08T11:57:01.834056Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-07-08T11:57:01.834065Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:7524678852602033091:2146], Recipient [3:7524678852602033091:2146]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-07-08T11:57:01.834068Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime >> test.py::test[simple_columns-simple_columns_join_coalesce_without_2-default.txt-Results] >> KqpUniqueIndex::InsertNullInComplexFkDuplicate [GOOD] >> test.py::test[udf-two_regexps--Results] [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24 >> test.py::test[pg-tpcds-q32-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q32-default.txt-Results] >> test.py::test[view-system_udf--Results] >> KqpIndexes::CreateTableWithExplicitAsyncIndexSQL [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22 >> test.py::test[optimizers-yql-7767_key_filter_with_view--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-7767_key_filter_with_view--Results] >> test.py::test[aggregate-group_by_session_only_distinct--Results] [GOOD] >> test.py::test[aggregate-percentiles_grouped--ForceBlocks] >> test.py::test[udf-udaf_lambda-default.txt-Results] [GOOD] >> test.py::test[udf-udaf_short--Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-49 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::CreateTableWithExplicitAsyncIndexSQL [GOOD] Test command err: Trying to start YDB, gRPC: 30042, MsgBus: 12269 2025-07-08T11:56:55.851495Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524678829683585654:2062];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:56:55.851511Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/002075/r3tmp/tmpod1rRp/pdisk_1.dat 2025-07-08T11:56:56.222598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:56:56.222624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:56:56.230263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30042, node 1 2025-07-08T11:56:56.245652Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:56:56.280995Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:56:56.281009Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:56:56.281011Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:56:56.281056Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12269 TClient is connected to server localhost:12269 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:56:56.502143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:56:56.509353Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-07-08T11:56:56.534498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:56:56.582552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-07-08T11:56:56.611445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:56:56.639169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-07-08T11:56:56.865118Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:56:56.909976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T11:56:56.932515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-07-08T11:56:56.992970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-07-08T11:56:57.012459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-07-08T11:56:57.033275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-07-08T11:56:57.063880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-07-08T11:56:57.101546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-07-08T11:56:57.507110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10355, MsgBus: 61258 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/002075/r3tmp/tmpdKr76L/pdisk_1.dat 2025-07-08T11:56:58.457450Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T11:56:58.472331Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:56:58.489417Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:56:58.489445Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:56:58.490873Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10355, node 2 2025-07-08T11:56:58.543621Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:56:58.543630Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-07-08T11:56:58.543632Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:56:58.543666Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61258 TClient is connected to server localhost:61258 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:56:58.862278Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:56:58.865390Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:56:58.870749Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:56:58.976295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:56:59.064740Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:56:59.114109Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:56:59.373223Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:56:59.557487Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:56:59.586048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:56:59.648870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T11:56:59.681544Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T11:56:59.747506Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T11:56:59.768735Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T11:56:59.795134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T11:57:00.221648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T11:57:00.359097Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 28701, MsgBus: 28581 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/002075/r3tmp/tmppbJKvw/pdisk_1.dat 2025-07-08T11:57:00.821317Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7524678851776616185:2087];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:57:00.822670Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T11:57:00.876372Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28701, node 3 2025-07-08T11:57:00.898647Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:57:00.898662Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:57:00.898664Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:57:00.898702Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T11:57:00.905335Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:57:00.905359Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:57:00.909230Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) 
VolatileState: Connecting -> Connected TClient is connected to server localhost:28581 TClient is connected to server localhost:28581 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:57:01.009538Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:01.011326Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:57:01.024010Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:01.043910Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:01.092171Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:01.168092Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:57:01.360276Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:57:01.385457Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:57:01.413327Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T11:57:01.445332Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T11:57:01.481131Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T11:57:01.510395Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T11:57:01.578801Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T11:57:01.805070Z node 3 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:57:01.820337Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T11:57:01.901211Z node 3 :TX_PROXY ERROR: Actor# [3:7524678856071586221:3780] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/TestTable\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 13], type: EPathTypeTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:484" severity: 1 } >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeView [GOOD] >> BackupRestoreS3::RestoreTablePartitioningSettings >> test.py::test[count-count_nullable_sub-default.txt-ForceBlocks] [GOOD] >> test.py::test[count-count_nullable_sub-default.txt-Results] >> test.py::test[blocks-date_greater_or_equal_scalar--Results] [GOOD] >> test.py::test[blocks-date_less_or_equal--ForceBlocks] >> test.py::test[window-full/aggregations--Results] [GOOD] >> test.py::test[window-full/session--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertNullInComplexFkDuplicate [GOOD] Test command err: Trying to start YDB, gRPC: 3383, MsgBus: 31366 2025-07-08T11:56:55.204129Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524678829875835444:2238];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:56:55.204316Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/43nv/00207e/r3tmp/tmpNq7gJ7/pdisk_1.dat 2025-07-08T11:56:55.633271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:56:55.633299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:56:55.637376Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:56:55.674240Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3383, node 1 2025-07-08T11:56:55.852495Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:56:55.852505Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:56:55.852507Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:56:55.852544Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31366 2025-07-08T11:56:56.133040Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:31366 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:56:56.266228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:56:56.277975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:56:56.372839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:56:56.458047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:56:56.474967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:56:56.563329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T11:56:56.585160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-07-08T11:56:56.612053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-07-08T11:56:56.628185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-07-08T11:56:56.688998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-07-08T11:56:56.711108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-07-08T11:56:56.724727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-07-08T11:56:56.909821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 20068, MsgBus: 24945 2025-07-08T11:56:59.524886Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524678848164550508:2058];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:56:59.524896Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00207e/r3tmp/tmpbeI6gs/pdisk_1.dat 2025-07-08T11:56:59.653657Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20068, node 2 2025-07-08T11:56:59.677064Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:56:59.677076Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:56:59.677077Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:56:59.677112Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T11:56:59.724268Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:56:59.724296Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:56:59.729392Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24945 TClient is connected to server localhost:24945 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:56:59.789635Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:56:59.793422Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:56:59.809858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:56:59.837055Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:56:59.910997Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:56:59.933427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:57:00.074519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:57:00.090003Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:57:00.108753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T11:57:00.128771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T11:57:00.142296Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T11:57:00.169491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T11:57:00.187046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T11:57:00.521745Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:57:00.585154Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Trying to start YDB, gRPC: 6694, MsgBus: 11980 2025-07-08T11:57:01.469024Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7524678858117611732:2167];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:57:01.469726Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00207e/r3tmp/tmpIXKMWz/pdisk_1.dat 2025-07-08T11:57:01.522706Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6694, node 3 2025-07-08T11:57:01.548499Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:57:01.548509Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:57:01.548511Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:57:01.548553Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T11:57:01.587760Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:57:01.587785Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:57:01.593296Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11980 TClient is connected to server localhost:11980 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:57:01.713655Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:01.717329Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:57:01.737486Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:57:01.769529Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:01.825694Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:01.893258Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:02.085175Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:57:02.117959Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:57:02.139221Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T11:57:02.156516Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T11:57:02.176675Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T11:57:02.201376Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T11:57:02.234001Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T11:57:02.470076Z node 3 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:57:02.513855Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 waiting... 
>> test.py::test[pg-tpcds-q43-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q54-default.txt-ForceBlocks] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-23 >> KqpRanges::NullInKey [GOOD] >> KqpRanges::NullInKeySuffix >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50 >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant >> test.py::test[pg-tpcds-q23-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q34-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q15-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q33-default.txt-Results] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 >> test.py::test[window-win_by_all_aggregate--ForceBlocks] [GOOD] >> test.py::test[window-win_by_all_aggregate--Results] >> test.py::test[join-mergejoin_small_primary--Results] [GOOD] >> test.py::test[join-mergejoin_small_primary-off-ForceBlocks] >> KqpRanges::NullInKeySuffix [GOOD] >> KqpRanges::NullInPredicate >> BackupRestoreS3::RestoreViewQueryText >> test.py::test[pg-tpcds-q32-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q36-default.txt-ForceBlocks] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52 >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTable >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck >> test.py::test[select-dict_lookup_by_key-default.txt-Results] [GOOD] >> test.py::test[select-exists_false-default.txt-Results] >> BackupRestoreS3::RestoreTablePartitioningSettings [GOOD] >> BackupRestoreS3::RestoreIndexTablePartitioningSettings >> BackupRestore::RestoreViewQueryText |62.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |62.0%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |62.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 [GOOD] |62.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |62.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |62.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut >> KqpRanges::NullInPredicate [GOOD] >> KqpRanges::NullInPredicateRow >> test.py::test[distinct-distinct_count_and_full_count-default.txt-Results] [GOOD] >> test.py::test[dq-precompute_parallel_mix--ForceBlocks] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-53 >> test.py::test[simple_columns-simple_columns_join_coalesce_without_2-default.txt-Results] 
[GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_1-default.txt-ForceBlocks] >> BackupRestoreS3::RestoreViewQueryText [GOOD] >> BackupRestoreS3::RestoreViewReferenceTable |62.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |62.0%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |62.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut >> test.py::test[join-pullup_inner-off-ForceBlocks] [GOOD] >> test.py::test[join-pullup_inner-off-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 [GOOD] Test command err: Starting YDB, grpc: 13855, msgbus: 5886 2025-07-08T11:56:31.485275Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524678726792382886:2242];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:56:31.486488Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001cd0/r3tmp/tmpQS6cwZ/pdisk_1.dat 2025-07-08T11:56:31.550218Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13855, node 1 2025-07-08T11:56:31.569055Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:56:31.569066Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:56:31.569067Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:56:31.569104Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T11:56:31.582684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:56:31.582714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:5886 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-07-08T11:56:31.584533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:56:31.585526Z node 1 :TX_PROXY DEBUG: actor# [1:7524678726792382858:2138] Handle TEvNavigate describe path dc-1 2025-07-08T11:56:31.585550Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678726792383348:2431] HANDLE EvNavigateScheme dc-1 2025-07-08T11:56:31.585755Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678726792383348:2431] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-07-08T11:56:31.593159Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678726792383348:2431] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-07-08T11:56:31.595044Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678726792383348:2431] Handle TEvDescribeSchemeResult Forward to# [1:7524678726792383347:2430] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-07-08T11:56:31.607001Z node 1 :TX_PROXY DEBUG: actor# [1:7524678726792382858:2138] Handle TEvProposeTransaction 2025-07-08T11:56:31.607015Z node 1 :TX_PROXY DEBUG: actor# [1:7524678726792382858:2138] Cookie# 0 userReqId# "" DELAY REQUEST, wait txids from allocator Type# Scheme 2025-07-08T11:56:31.621495Z node 1 :TX_PROXY DEBUG: actor# [1:7524678726792382858:2138] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-07-08T11:56:31.622192Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-07-08T11:56:31.622198Z node 1 :TX_PROXY DEBUG: actor# [1:7524678726792382858:2138] TxId# 281474976715657 ProcessProposeTransaction 2025-07-08T11:56:31.622233Z node 1 :TX_PROXY DEBUG: actor# [1:7524678726792382858:2138] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7524678726792383365:2441] 2025-07-08T11:56:31.633061Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678726792383365:2441] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-07-08T11:56:31.633097Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678726792383365:2441] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-07-08T11:56:31.633102Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678726792383365:2441] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-07-08T11:56:31.633116Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678726792383365:2441] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-07-08T11:56:31.633215Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678726792383365:2441] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-07-08T11:56:31.633242Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678726792383365:2441] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-07-08T11:56:31.633257Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678726792383365:2441] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-07-08T11:56:31.633296Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678726792383365:2441] txid# 281474976715657 HANDLE EvClientConnected 2025-07-08T11:56:31.633479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:56:31.634208Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678726792383365:2441] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-07-08T11:56:31.634222Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678726792383365:2441] txid# 281474976715657 SEND to# [1:7524678726792383360:2436] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} waiting... 
2025-07-08T11:56:31.636454Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:56:31.645335Z node 1 :TX_PROXY DEBUG: actor# [1:7524678726792382858:2138] Handle TEvProposeTransaction 2025-07-08T11:56:31.645347Z node 1 :TX_PROXY DEBUG: actor# [1:7524678726792382858:2138] TxId# 281474976715658 ProcessProposeTransaction 2025-07-08T11:56:31.645363Z node 1 :TX_PROXY DEBUG: actor# [1:7524678726792382858:2138] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7524678726792383403:2475] 2025-07-08T11:56:31.645917Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678726792383403:2475] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-07-08T11:56:31.645933Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678726792383403:2475] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-07-08T11:56:31.645936Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678726792383403:2475] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-07-08T11:56:31.645945Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678726792383403:2475] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-07-08T11:56:31.646011Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678726792383403:2475] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-07-08T11:56:31.646037Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678726792383403:2475] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-07-08T11:56:31.646048Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678726792383403:2475] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-07-08T11:56:31.646084Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678726792383403:2475] txid# 281474976715658 HANDLE EvClientConnected 2025-07-08T11:56:31.646174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T11:56:31.646897Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678726792383403:2475] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2025-07-08T11:56:31.646910Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678726792383403:2475] txid# 281474976715658 SEND to# [1:7524678726792383402:2474] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2025-07-08T11:56:31.657607Z node 1 :TX_PROXY DEBUG: actor# [1:7524678726792382858:2138] Handle TEvProposeTransaction 2025-07-08T11:56:31.657623Z node 1 :TX_PROXY DEBUG: actor# [1:7524678726792382858:2138] TxId# 281474976715659 ProcessProposeTransaction 2025-07-08T11:56:31.657685Z node 1 :TX_PROXY DEBUG: actor# [1:7524678726792382858:2138] Cookie# 0 userReqId# "" txid# 
281474976715659 SEND to# [1:7524678726792383421:2485] 2025-07-08T11:56:31.658182Z node 1 :TX_PROXY DEBUG: Acto ... 0262Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781178:2473] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-07-08T11:57:08.250266Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781178:2473] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-07-08T11:57:08.250283Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781178:2473] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-07-08T11:57:08.250372Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781178:2473] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-07-08T11:57:08.250391Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781178:2473] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-07-08T11:57:08.250402Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781178:2473] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-07-08T11:57:08.250438Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781178:2473] txid# 281474976715658 HANDLE EvClientConnected 2025-07-08T11:57:08.250547Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T11:57:08.253423Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781178:2473] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2025-07-08T11:57:08.253444Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781178:2473] txid# 281474976715658 SEND to# [59:7524678887870781177:2472] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2025-07-08T11:57:08.546438Z node 59 :TX_PROXY DEBUG: actor# [59:7524678883575813165:2114] Handle TEvProposeTransaction 2025-07-08T11:57:08.546455Z node 59 :TX_PROXY DEBUG: actor# [59:7524678883575813165:2114] TxId# 281474976715659 ProcessProposeTransaction 2025-07-08T11:57:08.546471Z node 59 :TX_PROXY DEBUG: actor# [59:7524678883575813165:2114] Cookie# 0 userReqId# "" txid# 281474976715659 SEND to# [59:7524678887870781218:2487] 2025-07-08T11:57:08.547329Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781218:2487] txid# 281474976715659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "ordinaryuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:57834" 2025-07-08T11:57:08.547347Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781218:2487] txid# 281474976715659 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-07-08T11:57:08.547351Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781218:2487] txid# 281474976715659 Bootstrap, UserSID: root@builtin 
IsClusterAdministrator: 1 2025-07-08T11:57:08.547364Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781218:2487] txid# 281474976715659 TEvNavigateKeySet requested from SchemeCache 2025-07-08T11:57:08.547460Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781218:2487] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-07-08T11:57:08.547485Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781218:2487] HANDLE EvNavigateKeySetResult, txid# 281474976715659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-07-08T11:57:08.547500Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781218:2487] txid# 281474976715659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715659 TabletId# 72057594046644480} 2025-07-08T11:57:08.547546Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781218:2487] txid# 281474976715659 HANDLE EvClientConnected 2025-07-08T11:57:08.553762Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781218:2487] txid# 281474976715659 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715659} 2025-07-08T11:57:08.553779Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781218:2487] txid# 281474976715659 SEND to# [59:7524678887870781217:2287] Source {TEvProposeTransactionStatus txid# 281474976715659 Status# 48} 2025-07-08T11:57:08.676292Z node 59 :TX_PROXY DEBUG: actor# [59:7524678883575813165:2114] Handle TEvProposeTransaction 2025-07-08T11:57:08.676308Z node 59 :TX_PROXY DEBUG: actor# [59:7524678883575813165:2114] TxId# 281474976715660 ProcessProposeTransaction 2025-07-08T11:57:08.676322Z node 59 :TX_PROXY DEBUG: actor# [59:7524678883575813165:2114] Cookie# 0 userReqId# "" txid# 281474976715660 SEND to# [59:7524678887870781240:2503] 2025-07-08T11:57:08.677187Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781240:2503] txid# 281474976715660 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:57834" 2025-07-08T11:57:08.677209Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781240:2503] txid# 281474976715660 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-07-08T11:57:08.677213Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781240:2503] txid# 281474976715660 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-07-08T11:57:08.677230Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781240:2503] txid# 281474976715660 TEvNavigateKeySet requested from SchemeCache 2025-07-08T11:57:08.677322Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781240:2503] txid# 281474976715660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-07-08T11:57:08.677347Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781240:2503] HANDLE EvNavigateKeySetResult, txid# 281474976715660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 
PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-07-08T11:57:08.677359Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781240:2503] txid# 281474976715660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715660 TabletId# 72057594046644480} 2025-07-08T11:57:08.677394Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781240:2503] txid# 281474976715660 HANDLE EvClientConnected 2025-07-08T11:57:08.677512Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T11:57:08.678375Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781240:2503] txid# 281474976715660 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715660} 2025-07-08T11:57:08.678385Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781240:2503] txid# 281474976715660 SEND to# [59:7524678887870781239:2292] Source {TEvProposeTransactionStatus txid# 281474976715660 Status# 48} 2025-07-08T11:57:08.690560Z node 59 :TX_PROXY DEBUG: actor# [59:7524678883575813165:2114] Handle TEvProposeTransaction 2025-07-08T11:57:08.690584Z node 59 :TX_PROXY DEBUG: actor# [59:7524678883575813165:2114] TxId# 281474976715661 ProcessProposeTransaction 2025-07-08T11:57:08.690594Z node 59 :TX_PROXY DEBUG: actor# [59:7524678883575813165:2114] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [59:7524678887870781266:2515] 2025-07-08T11:57:08.691398Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781266:2515] txid# 281474976715661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc1MjAxOTAyOCwiaWF0IjoxNzUxOTc1ODI4LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.OT2T-Ud4IaqqFo9l2SLEm8tmahsxQDJhbGz0kgt1o2KSSe3RoBfG0BcBQ0scGq13Nx3SVWTyQ4rr9AX1Nj0GMzZSsKiJpBUfcHVpTdb0h6Q2oVgM61YEo34RkQu1FDo_jQ6cnCFi577vspGHbSk5R_CvxOlLjgBdy8VboPOobtTy0eAKx-IGW_yLwmA2Vt57FlSK7QmSVFitO1i0E3A2ILobXXTSRYruAN4uftkS2iRYnjpH5CdSL6HZ-owe-CQI_M_qtSZI49ghFCGOHUCNMyudYWag1CEs-pl5AIs4CxFNATANdIaQ8fXD-r0kKV84P8C1se-IBbEfJmy1eec8EA\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc1MjAxOTAyOCwiaWF0IjoxNzUxOTc1ODI4LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:57834" 2025-07-08T11:57:08.691413Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781266:2515] txid# 281474976715661 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-07-08T11:57:08.691418Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781266:2515] txid# 281474976715661 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-07-08T11:57:08.691461Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781266:2515] txid# 281474976715661 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-07-08T11:57:08.691469Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781266:2515] txid# 281474976715661 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: 
root@builtin 2025-07-08T11:57:08.691478Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781266:2515] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-07-08T11:57:08.691536Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781266:2515] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-07-08T11:57:08.691540Z node 59 :TX_PROXY ERROR: Actor# [59:7524678887870781266:2515] txid# 281474976715661, Access denied for ordinaryuser, attempt to manage user 2025-07-08T11:57:08.691560Z node 59 :TX_PROXY ERROR: Actor# [59:7524678887870781266:2515] txid# 281474976715661, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-07-08T11:57:08.691564Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678887870781266:2515] txid# 281474976715661 SEND to# [59:7524678887870781265:2297] Source {TEvProposeTransactionStatus Status# 5} 2025-07-08T11:57:08.691684Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=M2RlOWUyZDAtZTgzODA4ZTMtM2M5NmQzZDgtODBkMzRhN2I=, ActorId: [59:7524678887870781256:2297], ActorState: ExecuteState, TraceId: 01jzmyb76gft9h05w9jhyeg6jk, Create QueryResponse for error on request, msg: 2025-07-08T11:57:08.691763Z node 59 :TX_PROXY DEBUG: actor# [59:7524678883575813165:2114] Handle TEvExecuteKqpTransaction 2025-07-08T11:57:08.691768Z node 59 :TX_PROXY DEBUG: actor# [59:7524678883575813165:2114] TxId# 281474976715662 ProcessProposeKqpTransaction >> test.py::test[join-pullup_inner-off-Results] [SKIPPED] >> test.py::test[join-yql-14829_left-off-ForceBlocks] >> TPDiskRaces::OwnerRecreationRaces [GOOD] >> TPDiskRaces::OwnerKilledWhileReadingLog >> test.py::test[optimizers-yql-7767_key_filter_with_view--Results] [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 >> test.py::test[count-count_nullable_sub-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_having_no_agg-default.txt-ForceBlocks] >> test.py::test[flatten_by-flatten_with_join--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_with_join--Results] >> test.py::test[join-mergejoin_with_different_key_names_norename-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_norename-off-Results] [SKIPPED] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly >> KqpRanges::NullInPredicateRow [GOOD] >> KqpRanges::NoFullScanAtScanQuery >> test.py::test[join-pullup_exclusion-off-ForceBlocks] >> BackupRestoreS3::RestoreIndexTablePartitioningSettings [GOOD] >> BackupRestoreS3::RestoreIndexTableReadReplicasSettings >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTable [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSubDomain [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeRtmrVolume [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSolomonVolume [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTableIndex |62.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part0/pytest >> test.py::test[optimizers-yql-7767_key_filter_with_view--Results] [GOOD] >> BackupRestore::RestoreViewQueryText [GOOD] >> BackupRestore::RestoreViewReferenceTable >> test.py::test[blocks-date_sub_interval_scalar--ForceBlocks] [GOOD] >> 
test.py::test[blocks-date_sub_interval_scalar--Results] >> BackupRestoreS3::RestoreViewReferenceTable [GOOD] >> BackupRestoreS3::RestoreViewDependentOnAnotherView ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 [GOOD] Test command err: Starting YDB, grpc: 23099, msgbus: 63625 2025-07-08T11:56:37.708890Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524678753636593758:2075];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:56:37.708921Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001d19/r3tmp/tmpz8v6jY/pdisk_1.dat 2025-07-08T11:56:37.755145Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23099, node 1 2025-07-08T11:56:37.778624Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:56:37.778635Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:56:37.778636Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:56:37.778672Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63625 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-07-08T11:56:37.795749Z node 1 :TX_PROXY DEBUG: actor# [1:7524678753636593933:2139] Handle TEvNavigate describe path dc-1 2025-07-08T11:56:37.795775Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678753636594398:2426] HANDLE EvNavigateScheme dc-1 2025-07-08T11:56:37.796042Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678753636594398:2426] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-07-08T11:56:37.802689Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678753636594398:2426] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-07-08T11:56:37.804740Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678753636594398:2426] Handle TEvDescribeSchemeResult Forward to# [1:7524678753636594397:2425] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 
0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-07-08T11:56:37.807748Z node 1 :TX_PROXY DEBUG: actor# [1:7524678753636593933:2139] Handle TEvProposeTransaction 2025-07-08T11:56:37.807762Z node 1 :TX_PROXY DEBUG: actor# [1:7524678753636593933:2139] Cookie# 0 userReqId# "" DELAY REQUEST, wait txids from allocator Type# Scheme 2025-07-08T11:56:37.809981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:56:37.810001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:56:37.812519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:56:37.832414Z node 1 :TX_PROXY DEBUG: actor# [1:7524678753636593933:2139] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-07-08T11:56:37.833280Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-07-08T11:56:37.833295Z node 1 :TX_PROXY DEBUG: actor# [1:7524678753636593933:2139] TxId# 281474976715657 ProcessProposeTransaction 2025-07-08T11:56:37.833341Z node 1 :TX_PROXY DEBUG: actor# [1:7524678753636593933:2139] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7524678753636594421:2441] 2025-07-08T11:56:37.845111Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678753636594421:2441] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-07-08T11:56:37.845148Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678753636594421:2441] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-07-08T11:56:37.845152Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678753636594421:2441] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-07-08T11:56:37.845166Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678753636594421:2441] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-07-08T11:56:37.845268Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678753636594421:2441] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 
2025-07-08T11:56:37.845297Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678753636594421:2441] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-07-08T11:56:37.845306Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678753636594421:2441] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-07-08T11:56:37.845343Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678753636594421:2441] txid# 281474976715657 HANDLE EvClientConnected 2025-07-08T11:56:37.845558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:56:37.847402Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678753636594421:2441] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-07-08T11:56:37.847423Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678753636594421:2441] txid# 281474976715657 SEND to# [1:7524678753636594410:2431] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} waiting... 2025-07-08T11:56:37.876465Z node 1 :TX_PROXY DEBUG: actor# [1:7524678753636593933:2139] Handle TEvProposeTransaction 2025-07-08T11:56:37.876480Z node 1 :TX_PROXY DEBUG: actor# [1:7524678753636593933:2139] TxId# 281474976715658 ProcessProposeTransaction 2025-07-08T11:56:37.876487Z node 1 :TX_PROXY DEBUG: actor# [1:7524678753636593933:2139] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7524678753636594459:2475] 2025-07-08T11:56:37.877078Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678753636594459:2475] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-07-08T11:56:37.877089Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678753636594459:2475] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-07-08T11:56:37.877091Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678753636594459:2475] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-07-08T11:56:37.877102Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678753636594459:2475] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-07-08T11:56:37.877170Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678753636594459:2475] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-07-08T11:56:37.877184Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678753636594459:2475] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-07-08T11:56:37.877192Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678753636594459:2475] txid# 281474976715658 SEND to# 
72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-07-08T11:56:37.877234Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678753636594459:2475] txid# 281474976715658 HANDLE EvClientConnected 2025-07-08T11:56:37.877344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T11:56:37.881547Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678753636594459:2475] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2025-07-08T11:56:37.881570Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678753636594459:2475] txid# 281474976715658 SEND to# [1:7524678753636594458:2474] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2025-07-08T11:56:37.889428Z node 1 :TX_PROXY DEBUG: actor# [1:7524678753636593933:2139] Handle TEvProposeTransaction 2025-07-08T11:56:37.889445Z node 1 :TX_PROXY DEBUG: actor# [1:7524678753636593933:2139] TxId# 281474976715659 ProcessProposeTransaction 2025-07-08T11:56:37.889459Z node 1 :TX_PROXY DEBUG: actor# [1:7524678753636593933:2139] Cookie# 0 userReqId# "" txid# 281474976715659 SEND to# [1:7524678753636594477:2485] 2025-07-08T11:56:37.890165Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678753636594477:2485] txid# 281474976715659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpMo ... ion, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:57:11.837345Z node 59 :TX_PROXY DEBUG: actor# [59:7524678901664392250:2099] Handle TEvProposeTransaction 2025-07-08T11:57:11.837358Z node 59 :TX_PROXY DEBUG: actor# [59:7524678901664392250:2099] TxId# 281474976715658 ProcessProposeTransaction 2025-07-08T11:57:11.837369Z node 59 :TX_PROXY DEBUG: actor# [59:7524678901664392250:2099] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [59:7524678901664392993:2473] 2025-07-08T11:57:11.838073Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678901664392993:2473] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-07-08T11:57:11.838087Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678901664392993:2473] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-07-08T11:57:11.838090Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678901664392993:2473] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-07-08T11:57:11.838107Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678901664392993:2473] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-07-08T11:57:11.838198Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678901664392993:2473] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-07-08T11:57:11.838221Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678901664392993:2473] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 
TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-07-08T11:57:11.838233Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678901664392993:2473] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-07-08T11:57:11.838277Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678901664392993:2473] txid# 281474976715658 HANDLE EvClientConnected 2025-07-08T11:57:11.838565Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T11:57:11.845156Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678901664392993:2473] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2025-07-08T11:57:11.845176Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678901664392993:2473] txid# 281474976715658 SEND to# [59:7524678901664392992:2472] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2025-07-08T11:57:11.866589Z node 59 :TX_PROXY DEBUG: actor# [59:7524678901664392250:2099] Handle TEvProposeTransaction 2025-07-08T11:57:11.866606Z node 59 :TX_PROXY DEBUG: actor# [59:7524678901664392250:2099] TxId# 281474976715659 ProcessProposeTransaction 2025-07-08T11:57:11.866631Z node 59 :TX_PROXY DEBUG: actor# [59:7524678901664392250:2099] Cookie# 0 userReqId# "" txid# 281474976715659 SEND to# [59:7524678901664393011:2483] 2025-07-08T11:57:11.867338Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678901664393011:2483] txid# 281474976715659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\033\010\001\022\027\032\025cluster_admin@builtin\n#\010\000\022\037\010\001\020\200\200\002\032\025cluster_admin@builtin \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:54388" 2025-07-08T11:57:11.867354Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678901664393011:2483] txid# 281474976715659 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-07-08T11:57:11.867358Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678901664393011:2483] txid# 281474976715659 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-07-08T11:57:11.867370Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678901664393011:2483] txid# 281474976715659 TEvNavigateKeySet requested from SchemeCache 2025-07-08T11:57:11.867454Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678901664393011:2483] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-07-08T11:57:11.867478Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678901664393011:2483] HANDLE EvNavigateKeySetResult, txid# 281474976715659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-07-08T11:57:11.867488Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678901664393011:2483] txid# 281474976715659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715659 TabletId# 72057594046644480} 2025-07-08T11:57:11.867532Z node 59 :TX_PROXY DEBUG: Actor# 
[59:7524678901664393011:2483] txid# 281474976715659 HANDLE EvClientConnected 2025-07-08T11:57:11.867664Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T11:57:11.871138Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678901664393011:2483] txid# 281474976715659 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715659} 2025-07-08T11:57:11.871154Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678901664393011:2483] txid# 281474976715659 SEND to# [59:7524678901664393010:2278] Source {TEvProposeTransactionStatus txid# 281474976715659 Status# 48} 2025-07-08T11:57:12.007296Z node 59 :TX_PROXY DEBUG: actor# [59:7524678901664392250:2099] Handle TEvProposeTransaction 2025-07-08T11:57:12.007312Z node 59 :TX_PROXY DEBUG: actor# [59:7524678901664392250:2099] TxId# 281474976715660 ProcessProposeTransaction 2025-07-08T11:57:12.007327Z node 59 :TX_PROXY DEBUG: actor# [59:7524678901664392250:2099] Cookie# 0 userReqId# "" txid# 281474976715660 SEND to# [59:7524678905959360342:2497] 2025-07-08T11:57:12.007922Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678905959360342:2497] txid# 281474976715660 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:54388" 2025-07-08T11:57:12.007934Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678905959360342:2497] txid# 281474976715660 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-07-08T11:57:12.007938Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678905959360342:2497] txid# 281474976715660 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-07-08T11:57:12.007949Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678905959360342:2497] txid# 281474976715660 TEvNavigateKeySet requested from SchemeCache 2025-07-08T11:57:12.008030Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678905959360342:2497] txid# 281474976715660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-07-08T11:57:12.008051Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678905959360342:2497] HANDLE EvNavigateKeySetResult, txid# 281474976715660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-07-08T11:57:12.008061Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678905959360342:2497] txid# 281474976715660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715660 TabletId# 72057594046644480} 2025-07-08T11:57:12.008101Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678905959360342:2497] txid# 281474976715660 HANDLE EvClientConnected 2025-07-08T11:57:12.013088Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678905959360342:2497] txid# 281474976715660 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715660} 2025-07-08T11:57:12.013103Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678905959360342:2497] txid# 281474976715660 SEND to# [59:7524678905959360341:2288] 
Source {TEvProposeTransactionStatus txid# 281474976715660 Status# 48} 2025-07-08T11:57:12.024778Z node 59 :TX_PROXY DEBUG: actor# [59:7524678901664392250:2099] Handle TEvProposeTransaction 2025-07-08T11:57:12.024789Z node 59 :TX_PROXY DEBUG: actor# [59:7524678901664392250:2099] TxId# 281474976715661 ProcessProposeTransaction 2025-07-08T11:57:12.024799Z node 59 :TX_PROXY DEBUG: actor# [59:7524678901664392250:2099] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [59:7524678905959360369:2509] 2025-07-08T11:57:12.025447Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678905959360369:2509] txid# 281474976715661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\025cluster_admin@builtin\022\030\022\026\n\024all-users@well-known\032\025cluster_admin@builtin\"\007Builtin*\027clus****ltin (2AB0E265)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:54388" 2025-07-08T11:57:12.025462Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678905959360369:2509] txid# 281474976715661 Bootstrap, UserSID: cluster_admin@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-07-08T11:57:12.025465Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678905959360369:2509] txid# 281474976715661 Bootstrap, UserSID: cluster_admin@builtin IsClusterAdministrator: 1 2025-07-08T11:57:12.025476Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678905959360369:2509] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-07-08T11:57:12.025554Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678905959360369:2509] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-07-08T11:57:12.025563Z node 59 :TX_PROXY ERROR: Actor# [59:7524678905959360369:2509] txid# 281474976715661, Access denied for cluster_admin@builtin on path /dc-1, with access AlterSchema 2025-07-08T11:57:12.025580Z node 59 :TX_PROXY ERROR: Actor# [59:7524678905959360369:2509] txid# 281474976715661, issues: { message: "Access denied for cluster_admin@builtin on path /dc-1" issue_code: 200000 severity: 1 } 2025-07-08T11:57:12.025583Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678905959360369:2509] txid# 281474976715661 SEND to# [59:7524678905959360368:2296] Source {TEvProposeTransactionStatus Status# 5} 2025-07-08T11:57:12.025684Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=ZmZlNTY0YWEtYTNmMWE5MTAtNDU4MDMxNzMtMTdkZWI4MTM=, ActorId: [59:7524678905959360359:2296], ActorState: ExecuteState, TraceId: 01jzmybaep9rhqmfmvhz3k48yp, Create QueryResponse for error on request, msg: 2025-07-08T11:57:12.025784Z node 59 :TX_PROXY DEBUG: actor# [59:7524678901664392250:2099] Handle TEvExecuteKqpTransaction 2025-07-08T11:57:12.025787Z node 59 :TX_PROXY DEBUG: actor# [59:7524678901664392250:2099] TxId# 281474976715662 ProcessProposeKqpTransaction >> test.py::test[view-system_udf--Results] [GOOD] >> test.py::test[weak_field-weak_field_aggregation--Results] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck >> test.py::test[key_filter-uuid--Results] [GOOD] >> test.py::test[key_filter-yql_5895_or-default.txt-Results] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTable >> BackupRestoreS3::RestoreIndexTableReadReplicasSettings [GOOD] >> BackupRestoreS3::RestoreTableSplitBoundaries |62.0%| [LD] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |62.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |62.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure >> SharedThreads::RegistrationAndPassingAwayActorsTailStrictPool [GOOD] >> SharedThreads::RegistrationAndPassingAwayActorsLazyStrictPool >> test.py::test[pg-tpcds-q34-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q34-default.txt-Results] >> test.py::test[epochs-use_sorted_by_complex_type--ForceBlocks] [GOOD] >> test.py::test[epochs-use_sorted_by_complex_type--Results] >> BackupRestoreS3::RestoreViewDependentOnAnotherView [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeInvalid [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobal >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck [GOOD] >> test.py::test[aggregate-aggregate_udf_nested--Results] [GOOD] >> test.py::test[aggregate-aggregate_with_default_yson_options-default.txt-Results] >> test.py::test[udf-udaf_short--Results] [GOOD] >> test.py::test[udf-udf_call_with_group_and_limit--Results] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTableIndex [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSequence |62.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |62.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |62.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeDir [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeBlockStoreVolume [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExtSubDomain [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeColumnStore [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeColumnTable [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeCdcStream >> BackupRestore::RestoreViewReferenceTable [GOOD] >> BackupRestore::RestoreViewToDifferentDatabase >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeInvalid [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypePersQueueGroup [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeKesus [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeFileStore [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeReplication [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExternalTable [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExternalDataSource [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeResourcePool [GOOD] >> BackupRestoreS3::RestoreTableSplitBoundaries [GOOD] >> BackupRestoreS3::RestoreIndexTableSplitBoundaries >> KqpRanges::NoFullScanAtScanQuery [GOOD] >> KqpRanges::NoFullScanAtDNFPredicate >> test.py::test[insert-append_sorted-to_sorted_calc-ForceBlocks] [GOOD] >> test.py::test[insert-append_sorted-to_sorted_calc-Results] >> test.py::test[join-mergejoin_small_primary-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_small_primary-off-Results] [SKIPPED] >> test.py::test[join-opt_on_opt_side--ForceBlocks] |62.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/backup_ut/unittest >> 
BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeResourcePool [GOOD] |62.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |62.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |62.1%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck [GOOD] Test command err: Starting YDB, grpc: 17358, msgbus: 62610 2025-07-08T11:56:27.891508Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524678712378894830:2242];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:56:27.891582Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001c48/r3tmp/tmpPPI8MD/pdisk_1.dat 2025-07-08T11:56:27.950699Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17358, node 1 2025-07-08T11:56:27.988195Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:56:27.988210Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:56:27.988212Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:56:27.988251Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T11:56:27.990050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:56:27.990081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:56:27.992406Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62610 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-07-08T11:56:28.002858Z node 1 :TX_PROXY DEBUG: actor# [1:7524678712378894802:2138] Handle TEvNavigate describe path dc-1 2025-07-08T11:56:28.002874Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678716673862581:2427] HANDLE EvNavigateScheme dc-1 2025-07-08T11:56:28.003050Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678716673862581:2427] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-07-08T11:56:28.008321Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678716673862581:2427] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-07-08T11:56:28.009706Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678716673862581:2427] Handle TEvDescribeSchemeResult Forward to# [1:7524678716673862580:2426] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-07-08T11:56:28.014212Z node 1 :TX_PROXY DEBUG: actor# [1:7524678712378894802:2138] Handle TEvProposeTransaction 2025-07-08T11:56:28.014222Z node 1 :TX_PROXY DEBUG: actor# [1:7524678712378894802:2138] Cookie# 0 userReqId# "" DELAY REQUEST, wait txids from allocator Type# Scheme 2025-07-08T11:56:28.022633Z node 1 :TX_PROXY DEBUG: actor# [1:7524678712378894802:2138] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-07-08T11:56:28.023532Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-07-08T11:56:28.023543Z node 1 :TX_PROXY DEBUG: actor# [1:7524678712378894802:2138] TxId# 281474976715657 ProcessProposeTransaction 2025-07-08T11:56:28.023579Z node 1 :TX_PROXY DEBUG: actor# [1:7524678712378894802:2138] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7524678716673862598:2437] 2025-07-08T11:56:28.035331Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678716673862598:2437] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-07-08T11:56:28.035370Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678716673862598:2437] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-07-08T11:56:28.035374Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678716673862598:2437] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-07-08T11:56:28.035393Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678716673862598:2437] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-07-08T11:56:28.035512Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678716673862598:2437] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-07-08T11:56:28.035548Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678716673862598:2437] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-07-08T11:56:28.035565Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678716673862598:2437] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-07-08T11:56:28.035617Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678716673862598:2437] txid# 281474976715657 HANDLE EvClientConnected 2025-07-08T11:56:28.035799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:56:28.037227Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678716673862598:2437] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-07-08T11:56:28.037240Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678716673862598:2437] txid# 281474976715657 SEND to# [1:7524678716673862593:2432] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} waiting... 
2025-07-08T11:56:28.069984Z node 1 :TX_PROXY DEBUG: actor# [1:7524678712378894802:2138] Handle TEvProposeTransaction 2025-07-08T11:56:28.070000Z node 1 :TX_PROXY DEBUG: actor# [1:7524678712378894802:2138] TxId# 281474976715658 ProcessProposeTransaction 2025-07-08T11:56:28.070014Z node 1 :TX_PROXY DEBUG: actor# [1:7524678712378894802:2138] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7524678716673862638:2473] 2025-07-08T11:56:28.070728Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678716673862638:2473] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-07-08T11:56:28.070751Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678716673862638:2473] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-07-08T11:56:28.070755Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678716673862638:2473] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-07-08T11:56:28.070768Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678716673862638:2473] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-07-08T11:56:28.070850Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678716673862638:2473] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-07-08T11:56:28.070872Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678716673862638:2473] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-07-08T11:56:28.070885Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678716673862638:2473] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-07-08T11:56:28.070932Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678716673862638:2473] txid# 281474976715658 HANDLE EvClientConnected 2025-07-08T11:56:28.071034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T11:56:28.071633Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678716673862638:2473] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2025-07-08T11:56:28.071644Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678716673862638:2473] txid# 281474976715658 SEND to# [1:7524678716673862637:2472] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2025-07-08T11:56:28.243188Z node 1 :TX_PROXY DEBUG: actor# [1:7524678712378894802:2138] Handle TEvProposeTransaction 2025-07-08T11:56:28.243206Z node 1 :TX_PROXY DEBUG: actor# [1:7524678712378894802:2138] TxId# 281474976715659 ProcessProposeTransaction 2025-07-08T11:56:28.243227Z node 1 :TX_PROXY DEBUG: actor# [1:7524678712378894802:2138] Cookie# 0 userReqId# "" txid# 281474976715659 SEND to# [1:7524678716673862678:2487] 2025-07-08T11:56:28.243996Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678716673862678:2487] txid# 281474976715659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchem ... AT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710663:1, propose status:StatusSuccess, reason: , at schemeshard: 72075186224037891 2025-07-08T11:57:16.626515Z node 60 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72075186224037891 2025-07-08T11:57:16.626531Z node 60 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710663:0 progress is 1/1 2025-07-08T11:57:16.626534Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710663 ready parts: 1/1 2025-07-08T11:57:16.626541Z node 60 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710663:0 progress is 1/1 2025-07-08T11:57:16.626543Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710663 ready parts: 1/1 2025-07-08T11:57:16.626551Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186224037891, LocalPathId: 1] was 4 2025-07-08T11:57:16.626563Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710663, ready parts: 1/1, is published: false 2025-07-08T11:57:16.626568Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72075186224037891, LocalPathId: 1], at schemeshard: 72075186224037891 2025-07-08T11:57:16.626570Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710663 ready parts: 1/1 2025-07-08T11:57:16.626573Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710663:0 2025-07-08T11:57:16.626576Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710663, publications: 1, subscribers: 0 2025-07-08T11:57:16.626578Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710663, [OwnerId: 72075186224037891, LocalPathId: 1], 9 2025-07-08T11:57:16.623888Z node 59 :TX_PROXY DEBUG: actor# [59:7524678913969866496:2100] Handle TEvProposeTransaction 2025-07-08T11:57:16.623912Z node 59 :TX_PROXY DEBUG: actor# [59:7524678913969866496:2100] TxId# 281474976710663 ProcessProposeTransaction 2025-07-08T11:57:16.623927Z node 59 :TX_PROXY DEBUG: actor# [59:7524678913969866496:2100] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7524678922559802219:2746] 2025-07-08T11:57:16.624839Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678922559802219:2746] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpModifyACL ModifyACL { Name: "tenant-db" DiffACL: "\n\022\010\001\022\016\032\014clusteradmin\n\031\010\000\022\025\010\001\020\200\004\032\014clusteradmin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:51470" 2025-07-08T11:57:16.624856Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678922559802219:2746] txid# 281474976710663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-07-08T11:57:16.624865Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678922559802219:2746] txid# 281474976710663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-07-08T11:57:16.624882Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678922559802219:2746] txid# 281474976710663 
TEvNavigateKeySet requested from SchemeCache 2025-07-08T11:57:16.625104Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678922559802219:2746] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-07-08T11:57:16.625127Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678922559802219:2746] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72075186224037891 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 2] DomainInfo.Params# Version: 3 PlanResolution: 50 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037889 SchemeShard: 72075186224037891 Hive: 72075186224037888 RedirectRequired# true 2025-07-08T11:57:16.625138Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678922559802219:2746] txid# 281474976710663 SEND to# 72075186224037891 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72075186224037891} 2025-07-08T11:57:16.625693Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678922559802219:2746] txid# 281474976710663 HANDLE EvClientConnected 2025-07-08T11:57:16.632276Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710663, response: Status: StatusSuccess TxId: 281474976710663 SchemeshardId: 72075186224037891, at schemeshard: 72075186224037891 2025-07-08T11:57:16.632321Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710663, subject: root@builtin, status: StatusSuccess, operation: MODIFY ACL, path: /dc-1/tenant-db, add access: +(DS):clusteradmin, remove access: -():clusteradmin:- 2025-07-08T11:57:16.632374Z node 60 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186224037891 2025-07-08T11:57:16.632377Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186224037891, txId: 281474976710663, path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-07-08T11:57:16.632429Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186224037891, txId: 281474976710663, path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-07-08T11:57:16.632453Z node 60 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186224037891 2025-07-08T11:57:16.632456Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [60:7524678915489129294:2266], at schemeshard: 72075186224037891, txId: 281474976710663, path id: 1 2025-07-08T11:57:16.632466Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [60:7524678915489129294:2266], at schemeshard: 72075186224037891, txId: 281474976710663, path id: 1 2025-07-08T11:57:16.632707Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678922559802219:2746] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2025-07-08T11:57:16.632731Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678922559802219:2746] txid# 281474976710663 SEND to# [59:7524678922559802218:2299] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} 2025-07-08T11:57:16.633173Z node 60 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186224037891, msg: Owner: 72075186224037891 Generation: 1 LocalPathId: 1 Version: 9 PathOwnerId: 72075186224037891, cookie: 281474976710663 2025-07-08T11:57:16.633191Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186224037891, msg: Owner: 72075186224037891 Generation: 1 LocalPathId: 1 Version: 9 PathOwnerId: 
72075186224037891, cookie: 281474976710663 2025-07-08T11:57:16.633194Z node 60 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72075186224037891, txId: 281474976710663 2025-07-08T11:57:16.633197Z node 60 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186224037891, txId: 281474976710663, pathId: [OwnerId: 72075186224037891, LocalPathId: 1], version: 9 2025-07-08T11:57:16.633202Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186224037891, LocalPathId: 1] was 5 2025-07-08T11:57:16.633232Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72075186224037891, txId: 281474976710663, subscribers: 0 2025-07-08T11:57:16.636582Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976710663 TEST clusteradmin triggers auth on tenant TClient is connected to server localhost:17100 TClient::Ls request: /dc-1/tenant-db 2025-07-08T11:57:16.682037Z node 59 :TX_PROXY DEBUG: actor# [59:7524678913969866496:2100] Handle TEvNavigate describe path /dc-1/tenant-db 2025-07-08T11:57:16.682057Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678922559802225:2751] HANDLE EvNavigateScheme /dc-1/tenant-db 2025-07-08T11:57:16.682182Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678922559802225:2751] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-07-08T11:57:16.682223Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678922559802225:2751] SEND to# 72075186224037891 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/tenant-db" Options { ReturnBoundaries: false ShowPrivateTable: true ReturnRangeKey: false } 2025-07-08T11:57:16.689683Z node 59 :TX_PROXY DEBUG: Actor# [59:7524678922559802225:2751] Handle TEvDescribeSchemeResult Forward to# [59:7524678922559802224:2750] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 0 Record# Status: StatusSuccess Path: "/dc-1/tenant-db" PathDescription { Self { Name: "dc-1/tenant-db" PathId: 1 SchemeshardId: 72075186224037891 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\025\010\001\020\200\004\032\014clusteradmin \003" EffectiveACL: "\n\030\010\001\020\377\377\003\032\014root@builtin \003(\001\n\025\010\001\020\200\004\032\014clusteradmin \003" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 1 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 3 SecurityStateVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 2 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037889 SchemeShard: 72075186224037891 Hive: 72075186224037888 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "name_tenant-db_kind_tenant-db" Kind: "tenant-db" } StoragePools { Name: "name_tenant-db_kind_test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Sids { Name: "tenantuser" Type: USER } Audience: "/dc-1/tenant-db" } } } 
PathId: 1 PathOwnerId: 72075186224037891 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1/tenant-db" PathId: 1 SchemeshardId: 72075186224037891 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\025\010\001\020\200\004\032\014clusteradmin \003" EffectiveACL: "\n\030\010\001\020\377\377\003\032\014root@builtin \003(\001\n\025\010\001\020\200\004\032\014clusteradmin \003" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 1 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 3 SecurityStateVersion: 1 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72075186224037891 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 184467440737095... (TRUNCATED) 2025-07-08T11:57:16.742036Z node 60 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:57:16.742134Z node 59 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 60 2025-07-08T11:57:16.742345Z node 59 :HIVE WARN: HIVE#72057594037968897 Node(60, (0,0,0,0)) VolatileState: Connected -> Disconnected >> test.py::test[window-win_by_all_aggregate--Results] [GOOD] >> test.py::test[window-win_func_aggr_hist--ForceBlocks] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTable [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSubDomain [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeRtmrVolume [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSolomonVolume [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTableIndex >> test.py::test[window-full/session--ForceBlocks] [GOOD] >> test.py::test[window-full/session--Results] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobal [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalAsync >> test.py::test[pg-tpcds-q54-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q54-default.txt-Results] >> test.py::test[pg-tpcds-q33-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q49-default.txt-Results] >> test.py::test[aggregate-percentiles_grouped--ForceBlocks] [GOOD] >> test.py::test[aggregate-percentiles_grouped--Results] |62.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |62.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |62.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap >> test.py::test[select-exists_false-default.txt-Results] [GOOD] >> test.py::test[select-exists_true-default.txt-Results] >> test.py::test[pg-tpcds-q34-default.txt-Results] [GOOD] |62.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart >> test.py::test[pg-tpcds-q38-default.txt-ForceBlocks] |62.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart >> test.py::test[distinct-distinct_having_no_agg-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_having_no_agg-default.txt-Results] >> test.py::test[blocks-date_sub_interval_scalar--Results] [GOOD] >> test.py::test[blocks-decimal_unary--ForceBlocks] |62.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart >> 
BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeCdcStream [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeBlobDepot [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeBackupCollection [GOOD] >> KqpRanges::NoFullScanAtDNFPredicate [GOOD] >> KqpRanges::MergeRanges >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_1-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_1-default.txt-Results] >> test.py::test[flatten_by-flatten_with_join--Results] [GOOD] >> test.py::test[flatten_by-flatten_with_resource--ForceBlocks] >> test.py::test[epochs-use_sorted_by_complex_type--Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeBackupCollection [GOOD] Test command err: 2025-07-08T11:57:19.045350Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524678932288594502:2232];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:57:19.045394Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000ac0/r3tmp/tmpjf98qT/pdisk_1.dat 2025-07-08T11:57:19.246299Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22570, node 1 2025-07-08T11:57:19.297092Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:57:19.297104Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:57:19.297106Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:57:19.297143Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61238 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:57:19.330332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:57:19.337267Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:57:19.372462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:57:19.372492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:57:19.382934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:57:19.719219Z node 1 :TX_PROXY DEBUG: actor# [1:7524678932288594517:2139] Handle TEvProposeTransaction 2025-07-08T11:57:19.719236Z node 1 :TX_PROXY DEBUG: actor# [1:7524678932288594517:2139] TxId# 281474976715658 ProcessProposeTransaction 2025-07-08T11:57:19.719249Z node 1 :TX_PROXY DEBUG: actor# [1:7524678932288594517:2139] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7524678932288595237:2601] 2025-07-08T11:57:19.731277Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678932288595237:2601] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Value" Type: "Utf8" NotNull: false } KeyColumnNames: "Key" PartitionConfig { } Temporary: false } } } UserToken: "" DatabaseName: "" 2025-07-08T11:57:19.731302Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678932288595237:2601] txid# 281474976715658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-07-08T11:57:19.731430Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678932288595237:2601] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-07-08T11:57:19.731441Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678932288595237:2601] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-07-08T11:57:19.731462Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678932288595237:2601] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-07-08T11:57:19.731493Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678932288595237:2601] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-07-08T11:57:19.731502Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678932288595237:2601] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-07-08T11:57:19.731542Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678932288595237:2601] txid# 281474976715658 HANDLE EvClientConnected 2025-07-08T11:57:19.731911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T11:57:19.737452Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678932288595237:2601] txid# 281474976715658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715658} 2025-07-08T11:57:19.737475Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678932288595237:2601] txid# 281474976715658 SEND to# [1:7524678932288595236:2296] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 53} 
2025-07-08T11:57:19.771492Z node 1 :TX_PROXY DEBUG: actor# [1:7524678932288594517:2139] Handle TEvProposeTransaction 2025-07-08T11:57:19.771506Z node 1 :TX_PROXY DEBUG: actor# [1:7524678932288594517:2139] TxId# 281474976715659 ProcessProposeTransaction 2025-07-08T11:57:19.771519Z node 1 :TX_PROXY DEBUG: actor# [1:7524678932288594517:2139] Cookie# 0 userReqId# "" txid# 281474976715659 SEND to# [1:7524678932288595387:2718] 2025-07-08T11:57:19.772153Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678932288595387:2718] txid# 281474976715659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateCdcStream CreateCdcStream { TableName: "table" StreamDescription { Name: "a" Mode: ECdcStreamModeUpdate Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" } } } } UserToken: "" DatabaseName: "" PeerName: "" 2025-07-08T11:57:19.772168Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678932288595387:2718] txid# 281474976715659 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-07-08T11:57:19.772178Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678932288595387:2718] txid# 281474976715659 TEvNavigateKeySet requested from SchemeCache 2025-07-08T11:57:19.772233Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678932288595387:2718] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-07-08T11:57:19.772266Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678932288595387:2718] HANDLE EvNavigateKeySetResult, txid# 281474976715659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-07-08T11:57:19.772277Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678932288595387:2718] txid# 281474976715659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715659 TabletId# 72057594046644480} 2025-07-08T11:57:19.772314Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678932288595387:2718] txid# 281474976715659 HANDLE EvClientConnected 2025-07-08T11:57:19.773149Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678932288595387:2718] txid# 281474976715659 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715659} 2025-07-08T11:57:19.773161Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678932288595387:2718] txid# 281474976715659 SEND to# [1:7524678932288595386:2307] Source {TEvProposeTransactionStatus txid# 281474976715659 Status# 53} 2025-07-08T11:57:19.853220Z node 1 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][1:7524678932288595570:2318] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:4:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-07-08T11:57:19.878343Z node 1 :TX_PROXY DEBUG: actor# [1:7524678932288594517:2139] Handle TEvProposeTransaction 2025-07-08T11:57:19.878361Z node 1 :TX_PROXY DEBUG: actor# [1:7524678932288594517:2139] TxId# 281474976715660 ProcessProposeTransaction 2025-07-08T11:57:19.878376Z node 1 :TX_PROXY DEBUG: actor# [1:7524678932288594517:2139] Cookie# 0 userReqId# "" txid# 281474976715660 SEND to# [1:7524678932288595664:2917] 2025-07-08T11:57:19.879088Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678932288595664:2917] txid# 281474976715660 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { 
WorkingDir: "/Root" OperationType: ESchemeOpCreateCdcStream CreateCdcStream { TableName: "table" StreamDescription { Name: "b" Mode: ECdcStreamModeUpdate Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" } } } } UserToken: "" DatabaseName: "" PeerName: "" 2025-07-08T11:57:19.879094Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678932288595664:2917] txid# 281474976715660 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-07-08T11:57:19.879104Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678932288595664:2917] txid# 281474976715660 TEvNavigateKeySet requested from SchemeCache 2025-07-08T11:57:19.879168Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678932288595664:2917] txid# 281474976715660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-07-08T11:57:19.879184Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678932288595664:2917] HANDLE EvNavigateKeySetResult, txid# 281474976715660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-07-08T11:57:19.879191Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678932288595664:2917] txid# 281474976715660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715660 TabletId# 72057594046644480} 2025-07-08T11:57:19.879224Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678932288595664:2917] txid# 281474976715660 HANDLE EvClientConnected 2025-07-08T11:57:19.880377Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678932288595664:2917] txid# 281474976715660 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715660} 2025-07-08T11:57:19.880385Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678932288595664:2917] txid# 281474976715660 SEND to# [1:7524678932288595663:2327] Source {TEvProposeTransactionStatus txid# 281474976715660 Status# 53} 2025-07-08T11:57:19.928745Z node 1 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][1:7524678932288595826:2334] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:6:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-07-08T ... 
TImport::TTxProgress: DoComplete 2025-07-08T11:57:20.636671Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-07-08T11:57:20.636673Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnAllocateResult: txId# 281474976710766, id# 281474976715664 2025-07-08T11:57:20.636682Z node 1 :IMPORT INFO: TImport::TTxProgress: CreateChangefeed propose: info# { Id: 281474976715664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/table' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 11] State: CreateChangefeed SubState: Proposed WaitTxId: 0 Issue: '' }, txId# 281474976710766 2025-07-08T11:57:20.636697Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-07-08T11:57:20.637891Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-07-08T11:57:20.637901Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnModifyResult: txId# 281474976710766, status# StatusAccepted 2025-07-08T11:57:20.637921Z node 1 :IMPORT INFO: TImport::TTxProgress: Wait for completion: info# { Id: 281474976715664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/table' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 11] State: CreateChangefeed SubState: Subscribed WaitTxId: 281474976710766 Issue: '' } 2025-07-08T11:57:20.639402Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-07-08T11:57:20.652859Z node 1 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037896:1][1:7524678936583565091:2434] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:17:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-07-08T11:57:20.658248Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-07-08T11:57:20.658257Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnNotifyResult: txId# 281474976710766 2025-07-08T11:57:20.658281Z node 1 :IMPORT INFO: TImport::TTxProgress: Allocate txId: info# { Id: 281474976715664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/table' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 11] State: CreateChangefeed SubState: AllocateTxId WaitTxId: 0 Issue: '' } 2025-07-08T11:57:20.658755Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-07-08T11:57:20.658774Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-07-08T11:57:20.658777Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnAllocateResult: txId# 281474976710767, id# 281474976715664 2025-07-08T11:57:20.658787Z node 1 :IMPORT INFO: TImport::TTxProgress: CreateConsumers propose: info# { Id: 281474976715664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/table' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 11] State: CreateChangefeed SubState: Proposed WaitTxId: 0 Issue: '' }, txId# 281474976710767 2025-07-08T11:57:20.658846Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-07-08T11:57:20.658953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710767:0, at schemeshard: 
72057594046644480 2025-07-08T11:57:20.659691Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-07-08T11:57:20.659699Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnModifyResult: txId# 281474976710767, status# StatusAccepted 2025-07-08T11:57:20.659716Z node 1 :IMPORT INFO: TImport::TTxProgress: Wait for completion: info# { Id: 281474976715664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/table' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 11] State: CreateChangefeed SubState: Subscribed WaitTxId: 281474976710767 Issue: '' } 2025-07-08T11:57:20.660321Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-07-08T11:57:20.668417Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-07-08T11:57:20.668426Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnNotifyResult: txId# 281474976710767 2025-07-08T11:57:20.668740Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-07-08T11:57:20.959517Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7524678936583565278:2446] [0] Resolve database: name# /Root 2025-07-08T11:57:20.959686Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7524678936583565278:2446] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-07-08T11:57:20.959692Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7524678936583565278:2446] [0] Send request: schemeShardId# 72057594046644480 2025-07-08T11:57:20.960000Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7524678936583565278:2446] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976715664 Status: SUCCESS Progress: PROGRESS_DONE ImportFromS3Settings { endpoint: "localhost:3196" scheme: HTTP bucket: "test_bucket" items { source_prefix: "table" destination_path: "/Root/table" } } StartTime { seconds: 1751975840 } EndTime { seconds: 1751975840 } } 2025-07-08T11:57:20.961031Z node 1 :TX_PROXY DEBUG: actor# [1:7524678932288594517:2139] Handle TEvNavigate describe path /Root/table 2025-07-08T11:57:20.961052Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678936583565284:4784] HANDLE EvNavigateScheme /Root/table 2025-07-08T11:57:20.961101Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678936583565284:4784] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-07-08T11:57:20.961124Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678936583565284:4784] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/table" Options { ShowPrivateTable: false } 2025-07-08T11:57:20.961425Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678936583565284:4784] Handle TEvDescribeSchemeResult Forward to# [1:7524678936583565282:2447] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/table" PathDescription { Self { Name: "table" PathId: 11 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true 
CreateTxId: 281474976710760 CreateStep: 1751975840517 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "table" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } TableSchemaVersion: 4 IsBackup: false CdcStreams { Name: "a" Mode: ECdcStreamModeUpdate PathId { OwnerId: 72057594046644480 LocalId: 14 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } CdcStreams { Name: "b" Mode: ECdcStreamModeUpdate PathId { OwnerId: 72057594046644480 LocalId: 16 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } CdcStreams { Name: "c" Mode: ECdcStreamModeUpdate PathId { OwnerId: 72057594046644480 LocalId: 12 } State: 
ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 9 PathsLimit: 10000 ShardsInside: 8 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 11 PathOwnerId: 72057594046644480 >> test.py::test[expr-constraints_of--ForceBlocks] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeInvalid [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypePersQueueGroup >> test.py::test[pg-tpcds-q54-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q58-default.txt-ForceBlocks] >> BackupRestore::RestoreViewToDifferentDatabase [GOOD] >> BackupRestore::RestoreViewDependentOnAnotherView >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalAsync [GOOD] >> BackupRestoreS3::RestoreIndexTableSplitBoundaries [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalUnique [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree >> BackupRestoreS3::RestoreIndexTableDecimalSplitBoundaries >> test.py::test[pg-tpcds-q36-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q36-default.txt-Results] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeDir >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSequence [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeView >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTableIndex [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSequence >> KqpRanges::MergeRanges [GOOD] >> test.py::test[insert-append_sorted-to_sorted_calc-Results] [GOOD] >> test.py::test[insert-select_relabel-default.txt-ForceBlocks] |62.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/apps/ydbd/ydbd >> test.py::test[join-pullup_exclusion-off-ForceBlocks] [GOOD] >> test.py::test[join-pullup_exclusion-off-Results] [SKIPPED] >> test.py::test[join-pullup_inner--ForceBlocks] |62.2%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd |62.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd >> test.py::test[key_filter-yql_5895_or-default.txt-Results] [GOOD] >> test.py::test[like-like_clause-default.txt-Results] |62.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |62.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |62.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain 
|62.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |62.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/opt/unittest >> KqpRanges::MergeRanges [GOOD] Test command err: Trying to start YDB, gRPC: 28048, MsgBus: 25675 2025-07-08T11:57:01.811343Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524678858208863642:2145];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:57:01.812230Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/002097/r3tmp/tmpNCLBpz/pdisk_1.dat 2025-07-08T11:57:02.281694Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:57:02.282453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:57:02.282466Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:57:02.285533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28048, node 1 2025-07-08T11:57:02.405210Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:57:02.405222Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:57:02.405224Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:57:02.405272Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25675 TClient is connected to server localhost:25675 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:57:02.661635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:57:02.665513Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-07-08T11:57:02.681964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:02.715474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:02.763876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:02.810253Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:57:02.829189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:03.106367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T11:57:03.120991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-07-08T11:57:03.144363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-07-08T11:57:03.179323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-07-08T11:57:03.243501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-07-08T11:57:03.277947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-07-08T11:57:03.352700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-07-08T11:57:03.825280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-07-08T11:57:03.949787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-07-08T11:57:04.092218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-07-08T11:57:04.165862Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-07-08T11:57:04.278304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24953, MsgBus: 14428 2025-07-08T11:57:06.335507Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524678879840270414:2079];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:57:06.335926Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/002097/r3tmp/tmp9pLCid/pdisk_1.dat 2025-07-08T11:57:06.366801Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:57:06.371933Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:57:06.371964Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:57:06.372921Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24953, node 2 2025-07-08T11:57:06.385507Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:57:06.385519Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:57:06.385521Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:57:06.385560Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14428 TClient is connected to server localhost:14428 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:57:06.574034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:57:06.581463Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:57:06.597842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:06.633248Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:06.672230Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:06.705463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:06.796048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:57:06.860071Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:57:06.924778Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T11:57:06.960197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T11:57:06.978150Z node 2 :FLAT_TX_SCHEMESHARD WA ... ode 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:57:19.977337Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:57:19.983097Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10044 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-07-08T11:57:20.106236Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:20.113470Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:57:20.129237Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:20.170309Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:20.233617Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:20.318967Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:20.634385Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:57:20.658192Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:57:20.678848Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T11:57:20.689115Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T11:57:20.714529Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T11:57:20.725911Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T11:57:20.744638Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T11:57:20.877007Z node 9 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:57:20.979303Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T11:57:21.136993Z node 9 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751975841175, txId: 281474976715672] shutting down ---------QUERY---------- --!syntax_v1 SELECT Value FROM `/Root/TestDNF` WHERE 
Key1 = 1 AND (Key2 = 100 OR Key2 = 300) ORDER BY Value; ---------RESULT--------- [[[5u]];[[9u]]] ------------------------ 2025-07-08T11:57:21.237232Z node 9 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751975841273, txId: 281474976715674] shutting down ---------QUERY---------- --!syntax_v1 SELECT Value FROM `/Root/TestDNF` WHERE Key1 = 1 AND Key2 IN (100, 300, 400) ORDER BY Value; ---------RESULT--------- [[[5u]];[[9u]];[[10u]]] ------------------------ Trying to start YDB, gRPC: 27753, MsgBus: 62565 2025-07-08T11:57:21.731540Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7524678944216349306:2087];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:57:21.732588Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/002097/r3tmp/tmp2VAnoj/pdisk_1.dat 2025-07-08T11:57:21.775486Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27753, node 10 2025-07-08T11:57:21.789419Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:57:21.789434Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:57:21.789436Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:57:21.789493Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62565 2025-07-08T11:57:21.836142Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:57:21.836167Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:57:21.838514Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62565 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-07-08T11:57:21.864458Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:57:21.868529Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:57:21.873869Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:21.898687Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:21.922043Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:21.937985Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:22.278768Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:57:22.291403Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:57:22.309341Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T11:57:22.322078Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T11:57:22.334670Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T11:57:22.348786Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T11:57:22.370273Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T11:57:22.568811Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T11:57:22.722330Z node 10 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751975842757, txId: 281474976715672] shutting down 2025-07-08T11:57:22.734968Z node 10 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |62.2%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |62.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |62.2%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator >> test.py::test[dq-precompute_parallel_mix--ForceBlocks] [GOOD] >> test.py::test[dq-precompute_parallel_mix--Results] [SKIPPED] >> test.py::test[dq-wrong_script_segf--ForceBlocks] >> test.py::test[blocks-date_less_or_equal--ForceBlocks] [GOOD] >> test.py::test[blocks-date_less_or_equal--Results] >> test.py::test[dq-wrong_script_segf--ForceBlocks] [SKIPPED] >> test.py::test[dq-wrong_script_segf--Results] [SKIPPED] >> test.py::test[flatten_by-flatten_with_subquery-default.txt-ForceBlocks] |62.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator >> test.py::test[distinct-distinct_having_no_agg-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_star-default.txt-ForceBlocks] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeDir [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeBlockStoreVolume [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeColumnStore [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeColumnTable [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeCdcStream >> BackupRestore::RestoreTablePartitioningSettings >> BackupRestore::TestAllSchemeObjectTypes-EPathTypePersQueueGroup [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeKesus >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSequence [GOOD] >> test.py::test[join-yql-14829_left-off-ForceBlocks] [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeView [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeResourcePool [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTransfer [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_1-default.txt-Results] [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTransfer [GOOD] >> test.py::test[table_range-merge_non_strict--ForceBlocks] >> BackupRestoreS3::RestoreIndexTableDecimalSplitBoundaries [GOOD] >> BackupRestoreS3::PrefixedVectorIndex >> BackupRestore::RestoreViewDependentOnAnotherView [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobal >> test.py::test[join-yql-14829_left-off-Results] [SKIPPED] >> test.py::test[key_filter-calc_dependent-default.txt-ForceBlocks] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD] >> test.py::test[weak_field-weak_field_aggregation--Results] [GOOD] >> test.py::test[weak_field-weak_field_esc_yson--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTransfer [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmpc8MzcN/pdisk_1.dat 2025-07-08T11:57:16.989180Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524678920483171946:2240];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:57:16.989330Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T11:57:17.178150Z node 1 :IMPORT 
WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62695, node 1 2025-07-08T11:57:17.219859Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:57:17.219882Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:57:17.229845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:57:17.300894Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:57:17.300906Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:57:17.300909Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:57:17.300961Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26356 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:57:17.416438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:17.829013Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:57:18.024819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-07-08T11:57:18.242403Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710659. Ctx: { TraceId: 01jzmybgdb1tpw0qzgkcqazgkx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzBlMmQ4OS1kMDVmOTEyZC1hZDRmYjRjYi1lZTBjYmVmYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:57:18.301507Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710660. Ctx: { TraceId: 01jzmybght5ktrsj44pksvv058, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzBlMmQ4OS1kMDVmOTEyZC1hZDRmYjRjYi1lZTBjYmVmYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root Backup "/Root" to "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmpAKOWeW/"Create temporary directory "/Root/~backup_20250708T115718" in database2025-07-08T11:57:18.327742Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 Process "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmpAKOWeW/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250708T115718/table" }Backup table "/Root/~backup_20250708T115718/table" to "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmpAKOWeW/table"Describe table "/Root/~backup_20250708T115718/table"Write scheme into "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmpAKOWeW/table/scheme.pb"Describe table "/Root/table"Write ACL into "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmpAKOWeW/table/permissions.pb"Read table "/Root/~backup_20250708T115718/table"Write data into "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmpAKOWeW/table/data_00.csv"Drop table "/Root/~backup_20250708T115718/table"2025-07-08T11:57:18.998588Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found Remove temporary directory "/Root/~backup_20250708T115718" in database2025-07-08T11:57:19.041945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710666:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmpAKOWeW/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmpAKOWeW/" to "/Root"2025-07-08T11:57:19.085226Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found Process "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmpAKOWeW/table"Read scheme from "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmpAKOWeW/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmpAKOWeW/table" to "/Root/table"2025-07-08T11:57:19.202004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmpAKOWeW/table/data_00.csv"2025-07-08T11:57:19.306058Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jzmybhhjax6r83zbn27n2ypz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmNmN2FiZDUtMjk0MzViN2EtNDI0OTAxZTctNDA2OGY0ODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root Restore ACL "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmpAKOWeW/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmpAKOWeW/table/permissions.pb"2025-07-08T11:57:19.342441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710670:0, at schemeshard: 72057594046644480 Restore completed successfully2025-07-08T11:57:19.386253Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. 
Ctx: { TraceId: 01jzmybhkq5x72475rnajznfac, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzBlMmQ4OS1kMDVmOTEyZC1hZDRmYjRjYi1lZTBjYmVmYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmp8D2hDW/pdisk_1.dat 2025-07-08T11:57:20.955155Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T11:57:21.056824Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:57:21.079414Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:57:21.079439Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:57:21.085866Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6893, node 4 2025-07-08T11:57:21.161274Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:57:21.161285Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:57:21.161288Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:57:21.161339Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19653 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:57:21.210235Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:57:21.214129Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:57:21.470712Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmpIZGP9O/"Create temporary directory "/Root/~backup_20250708T115721" in databaseProcess "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmpIZGP9O/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250708T115721/table" }Backup table "/Root/~backup_20250708T115721/table" to "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmpIZGP9O/table"Describe table "/Root/~backup_20250708T115721/table"Write scheme into "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmpIZGP9O/table/scheme.pb"Describe table "/Root/table"Write ACL into "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmpIZGP9O/table/permissions.pb"Read table "/Root/~backup_20250708T115721/table"Write data into "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmpIZGP9O/table/data_00.csv"Drop table "/Root/~backup_20250708T115721/table"Remove temporary directory "/Root/~backup_20250708T115721" in database2025-07-08T11:57:21.697995Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037890 not found 2025-07-08T11:57:21.702145Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037891 not found 2025-07-08T11:57:21.714154Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715664:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmpIZGP9O/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmpIZGP9O/" to "/Root"Process "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmpIZGP9O/table"Read scheme from "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmpIZGP9O/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmpIZGP9O/table" to "/Root/table"2025-07-08T11:57:21.771543Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T11:57:21.772832Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037888 not found 2025-07-08T11:57:21.772845Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037889 not found Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmpIZGP9O/table/data_00.csv"Restore index "byValue" on "/Root/table"2025-07-08T11:57:21.847033Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-07-08T11:57:21.870245Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 
72057594046644480 2025-07-08T11:57:21.937020Z node 4 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Restore ACL "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmpIZGP9O/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmpIZGP9O/table/permissions.pb"2025-07-08T11:57:21.991953Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715669:0, at schemeshard: 72057594046644480 Restore completed successfully test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmpNgpJN2/pdisk_1.dat 2025-07-08T11:57:22.810189Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T11:57:22.846355Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4352, node 7 2025-07-08T11:57:22.897764Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:57:22.897775Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:57:22.897777Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:57:22.897831Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T11:57:22.901688Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:57:22.901713Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:57:22.904520Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15703 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:57:22.921759Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:23.173803Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T11:57:23.233884Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715659. 
Ctx: { TraceId: 01jzmybnc8eyxedj2057r53kfc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MTRkYTk1OGItMmQ2NTRjYTMtZWEzMDYxM2UtNWY2NTE2Zjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:57:23.242172Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jzmybnc8eyxedj2057r53kfc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MTRkYTk1OGItMmQ2NTRjYTMtZWEzMDYxM2UtNWY2NTE2Zjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:57:23.291286Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jzmybne00cbjcx2nnj9yhtjr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MTRkYTk1OGItMmQ2NTRjYTMtZWEzMDYxM2UtNWY2NTE2Zjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root Backup "/Root" to "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmp6XQc5y/"Create temporary directory "/Root/~backup_20250708T115723" in database2025-07-08T11:57:23.348110Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715662, at schemeshard: 72057594046644480 Process "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmp6XQc5y/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250708T115723/table" }2025-07-08T11:57:23.353209Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715663:1, at schemeshard: 72057594046644480 Backup table "/Root/~backup_20250708T115723/table" to "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmp6XQc5y/table"Describe table "/Root/~backup_20250708T115723/table"Write scheme into "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmp6XQc5y/table/scheme.pb"Describe table "/Root/table"Write ACL into "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmp6XQc5y/table/permissions.pb"Read table "/Root/~backup_20250708T115723/table"Write data into "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmp6XQc5y/table/data_00.csv"Drop table "/Root/~backup_20250708T115723/table"2025-07-08T11:57:23.473825Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropSequence, opId: 281474976715666:1, at schemeshard: 72057594046644480 2025-07-08T11:57:23.492238Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037890 not found Remove temporary directory "/Root/~backup_20250708T115723" in database2025-07-08T11:57:23.506144Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715667:0, at schemeshard: 72057594046644480 Backup completed successfully2025-07-08T11:57:23.527485Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropSequence, opId: 281474976715668:1, at schemeshard: 72057594046644480 Restore "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmp6XQc5y/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmp6XQc5y/" to "/Root"Process "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmp6XQc5y/table"Read scheme from "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmp6XQc5y/table/scheme.pb"Restore table 
"/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmp6XQc5y/table" to "/Root/table"2025-07-08T11:57:23.547961Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037889 not found 2025-07-08T11:57:23.550382Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmp6XQc5y/table/data_00.csv"2025-07-08T11:57:23.580929Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jzmybnqh0fxvka248gdyne9n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YTRiOTJhYjAtZjY0OTI5NGYtZTQ1ZTQzNmYtMmNlOThlYmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root Restore ACL "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmp6XQc5y/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/43nv/000acc/r3tmp/tmp6XQc5y/table/permissions.pb"2025-07-08T11:57:23.592428Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715671:0, at schemeshard: 72057594046644480 Restore completed successfully2025-07-08T11:57:23.627950Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jzmybnrd8wz87pvbtmpf93mv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MTRkYTk1OGItMmQ2NTRjYTMtZWEzMDYxM2UtNWY2NTE2Zjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTransfer [GOOD] Test command err: 2025-07-08T11:57:09.337298Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524678891035542470:2146];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:57:09.337348Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000b18/r3tmp/tmpa9ZmoP/pdisk_1.dat 2025-07-08T11:57:09.757843Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:57:09.769928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:57:09.769951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:57:09.785754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10479, node 1 2025-07-08T11:57:10.049386Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:57:10.049404Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:57:10.049405Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:57:10.049454Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18463 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:57:10.122265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:10.141284Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-07-08T11:57:10.353029Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:57:11.163597Z node 1 :TX_PROXY DEBUG: actor# [1:7524678891035542479:2131] Handle TEvProposeTransaction 2025-07-08T11:57:11.163610Z node 1 :TX_PROXY DEBUG: actor# [1:7524678891035542479:2131] TxId# 281474976710658 ProcessProposeTransaction 2025-07-08T11:57:11.163623Z node 1 :TX_PROXY DEBUG: actor# [1:7524678891035542479:2131] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7524678899625477928:2608] 2025-07-08T11:57:11.191014Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678899625477928:2608] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Value" Type: "Utf8" NotNull: false } KeyColumnNames: "Key" PartitionConfig { } Temporary: false } } } UserToken: "" DatabaseName: "" 2025-07-08T11:57:11.191036Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678899625477928:2608] txid# 281474976710658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-07-08T11:57:11.191142Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678899625477928:2608] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-07-08T11:57:11.191151Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678899625477928:2608] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-07-08T11:57:11.191171Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678899625477928:2608] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-07-08T11:57:11.191194Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678899625477928:2608] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-07-08T11:57:11.191201Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678899625477928:2608] txid# 281474976710658 SEND to# 
72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-07-08T11:57:11.191229Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678899625477928:2608] txid# 281474976710658 HANDLE EvClientConnected 2025-07-08T11:57:11.195649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-07-08T11:57:11.198505Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678899625477928:2608] txid# 281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-07-08T11:57:11.198520Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678899625477928:2608] txid# 281474976710658 SEND to# [1:7524678899625477927:2297] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-07-08T11:57:11.351713Z node 1 :TX_PROXY DEBUG: actor# [1:7524678891035542479:2131] Handle TEvExecuteKqpTransaction 2025-07-08T11:57:11.351727Z node 1 :TX_PROXY DEBUG: actor# [1:7524678891035542479:2131] TxId# 281474976710659 ProcessProposeKqpTransaction 2025-07-08T11:57:11.352061Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710659. Ctx: { TraceId: 01jzmyb9rz8eza098y0dt4509z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjRkMjU1MWEtY2YyZWJjMDYtMThkMDVkMWItZDhiYzU4NWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:57:11.399443Z node 1 :TX_PROXY DEBUG: actor# [1:7524678891035542479:2131] Handle TEvExecuteKqpTransaction 2025-07-08T11:57:11.399461Z node 1 :TX_PROXY DEBUG: actor# [1:7524678891035542479:2131] TxId# 281474976710660 ProcessProposeKqpTransaction 2025-07-08T11:57:11.400178Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710660. Ctx: { TraceId: 01jzmyb9t9385mw0fqpdfb95vj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjRkMjU1MWEtY2YyZWJjMDYtMThkMDVkMWItZDhiYzU4NWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T11:57:11.414585Z node 1 :TX_PROXY DEBUG: actor# [1:7524678891035542479:2131] Handle TEvNavigate describe path /Root 2025-07-08T11:57:11.414611Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678899625478111:2740] HANDLE EvNavigateScheme /Root 2025-07-08T11:57:11.414673Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678899625478111:2740] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-07-08T11:57:11.414765Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678899625478111:2740] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root" Options { ShowPrivateTable: false } 2025-07-08T11:57:11.414995Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678899625478111:2740] Handle TEvDescribeSchemeResult Forward to# [1:7524678899625478109:2318] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 65 Record# Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1751975830178 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/Root:test" Kind: "test" } StoragePools { Name: "hdd" Kind: "hdd" } StoragePools { Name: "hdd1" Kind: "hdd1" } StoragePools { Name: "hdd2" Kind: "hdd2" } StoragePools { Name: "ssd" Kind: "ssd" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046644480 2025-07-08T11:57:11.416557Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7524678899625478113:2319] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } },{ Path: Root/table TableId: [72057594046644480:2:1] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: 
(empty maybe) Users: [] Groups: [] } }] } 2025-07-08T11:57:11.416564Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7524678899625478113:2319] [0] Allocate txId 2025-07-08T11:57:11.416625Z node 1 :TX_PROXY DEBUG: actor# [1:7524678891035542479:2131] Handle TEvAllocateTxId 2025-07-08T11:57:11.416664Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7524678899625478113:2319] [281474976710661] TEvTxUserProxy::TEvAllocateTxIdResult 2025-07-08T11:57:11.416668Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7524678899625478113:2319] [281474976710661] Resolve database: name# /Root 2025-07-08T11:57:11.416772Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7524678899625478113:2319] [281474976710661] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, ... tempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250708/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;range;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=c8d198f3d8be2c1f88cc2fc612773e71552a77caa14511f10342a233194b234c content-type: application/xml range: bytes=0-30 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250708T115723Z S3_MOCK::HttpServeRead: /test_bucket/view/metadata.json / 31 2025-07-08T11:57:23.597346Z node 10 :IMPORT DEBUG: HandleMetadata TEvExternalStorage::TEvGetObjectResponse: self# [10:7524678949260012136:2199], result# 3486cb59549c9f2b1f64d3c0399801a0 REQUEST: HEAD /test_bucket/view/scheme.pb HTTP/1.1 HEADERS: Host: localhost:21667 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7799E502-0392-49A5-B3EA-EB28149273D2 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250708/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=c3b0bbb8a734cfcc3aaecd112eb61f2b94804974f1419093184b3ae863a37760 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250708T115723Z 2025-07-08T11:57:23.611030Z node 10 :IMPORT DEBUG: HandleScheme TEvExternalStorage::TEvHeadObjectResponse: self# [10:7524678949260012136:2199], result# No response body. 
REQUEST: HEAD /test_bucket/view/create_view.sql HTTP/1.1 HEADERS: Host: localhost:21667 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 363D8B1F-A0B3-4A80-9933-A419210AF726 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250708/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=4ab807c57378dd1dc4e4f737f9f97e65260925087421fc88f30e910246681bf1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250708T115723Z S3_MOCK::HttpServeRead: /test_bucket/view/create_view.sql / 165 2025-07-08T11:57:23.612016Z node 10 :IMPORT DEBUG: HandleScheme TEvExternalStorage::TEvHeadObjectResponse: self# [10:7524678949260012136:2199], result# HeadObjectResult { ETag: 54623f53d68141118383b3390c4965d5 ContentLength: 165 } REQUEST: GET /test_bucket/view/create_view.sql HTTP/1.1 HEADERS: Host: localhost:21667 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D485909C-27CE-493D-85FE-84F17CD12A9E amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250708/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;range;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=5f90b19aef3031d7a48ed6f7fae73cebe7f0bef9d55e48ba61cb6e193e6fdcf4 content-type: application/xml range: bytes=0-164 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250708T115723Z S3_MOCK::HttpServeRead: /test_bucket/view/create_view.sql / 165 2025-07-08T11:57:23.613776Z node 10 :IMPORT DEBUG: HandleScheme TEvExternalStorage::TEvGetObjectResponse: self# [10:7524678949260012136:2199], result# 54623f53d68141118383b3390c4965d5 REQUEST: HEAD /test_bucket/view/permissions.pb HTTP/1.1 HEADERS: Host: localhost:21667 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 91FD67BE-D943-4BDC-94AB-3D6B0B85D86E amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250708/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=723aadabcde4a5fc463a45b932c89822977db5608f3881616b7e61d622e414db content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250708T115723Z 2025-07-08T11:57:23.633353Z node 10 :IMPORT DEBUG: HandlePermissions TEvExternalStorage::TEvHeadObjectResponse: self# [10:7524678949260012136:2199], result# No response body. 
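The import walks the backup item with ordinary SigV4-signed S3 calls: a HEAD to probe each object, a ranged GET to fetch it, and finally the ListObjects request that follows in the log. A minimal sketch reproducing the same calls against the mock with boto3; the endpoint, bucket, object keys, region and access key are taken from the log above, while the secret key is a placeholder the log never reveals:

    import boto3

    # Mock S3 endpoint, bucket and object keys as they appear in the log above.
    s3 = boto3.client(
        "s3",
        endpoint_url="http://localhost:21667",
        region_name="us-east-1",
        aws_access_key_id="test_key",
        aws_secret_access_key="test_secret",  # placeholder: the secret is not logged
    )

    # Probe an object the way the import does (HEAD, empty body).
    head = s3.head_object(Bucket="test_bucket", Key="view/create_view.sql")
    size = head["ContentLength"]  # 165 in the log

    # Fetch it with an explicit Range, mirroring "range: bytes=0-164".
    body = s3.get_object(
        Bucket="test_bucket", Key="view/create_view.sql", Range=f"bytes=0-{size - 1}"
    )["Body"].read()

    # Enumerate everything under the item prefix, like the final ListObjects call.
    listing = s3.list_objects_v2(Bucket="test_bucket", Prefix="view")
    print(size, len(body), [o["Key"] for o in listing.get("Contents", [])])

Run against the mock while the test server is up, this should emit request lines very close to the ones recorded above, differing only in signature and request ids.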
REQUEST: GET /test_bucket?prefix=view HTTP/1.1 HEADERS: Host: localhost:21667 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 94521630-0282-41CB-82AC-5018318E8CEC amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250708/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=8aef5d05f57d99ef0fadd80b28d93d0ab6b150746be53930d45adf04f607f530 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250708T115723Z S3_MOCK::HttpServeList: view 2025-07-08T11:57:23.639890Z node 10 :IMPORT DEBUG: HandleChangefeeds TEvExternalStorage::TEvListObjectResponse: self# [10:7524678949260012136:2199], result# ListObjectsResult { } 2025-07-08T11:57:23.639903Z node 10 :IMPORT INFO: Reply: self# [10:7524678949260012136:2199], success# 1, error# 2025-07-08T11:57:23.639928Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-07-08T11:57:23.639932Z node 10 :IMPORT DEBUG: TImport::TTxProgress: OnSchemeResult: id# 281474976715662, itemIdx# 0, success# 1 2025-07-08T11:57:23.645509Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-07-08T11:57:23.648490Z node 10 :IMPORT DEBUG: TSchemeQueryExecutor HandleCompileResponse, self: [10:7524678949260012145:2727], status: SUCCESS 2025-07-08T11:57:23.648502Z node 10 :IMPORT INFO: TSchemeQueryExecutor Reply, self: [10:7524678949260012145:2727], status: SUCCESS 2025-07-08T11:57:23.648556Z node 10 :IMPORT DEBUG: TSchemeQueryExecutor Reply, self: [10:7524678949260012145:2727], status: SUCCESS, prepared query: "WorkingDir: \"/Root\" OperationType: ESchemeOpCreateView FailedOnAlreadyExists: false CreateView { Name: \"view\" QueryText: \"SELECT 1 AS Key UNION SELECT 2 AS Key UNION SELECT 3 AS Key\" CapturedContext { PathPrefix: \"/Root\" SyntaxVersion: 1 AnsiLexer: false PgParser: false Pragmas: \"AnsiInForEmptyOrNullableItemsCollections\" Pragmas: \"AnsiLike\" Pragmas: \"FlexibleTypes\" Pragmas: \"AnsiCurrentRow\" Pragmas: \"WarnOnAnsiAliasShadowing\" Pragmas: \"AnsiOptionalAs\" Pragmas: \"EmitAggApply\" } }" 2025-07-08T11:57:23.648605Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-07-08T11:57:23.648609Z node 10 :IMPORT DEBUG: TImport::TTxProgress: OnSchemeQueryPreparation: id# 281474976715662, itemIdx# 0, status# SUCCESS, error# 2025-07-08T11:57:23.648645Z node 10 :IMPORT INFO: TImport::TTxProgress: Allocate txId: info# { Id: 281474976715662 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/view' DstPathId: State: CreateSchemeObject SubState: AllocateTxId WaitTxId: 0 Issue: '' } 2025-07-08T11:57:23.650843Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-07-08T11:57:23.650871Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-07-08T11:57:23.650874Z node 10 :IMPORT DEBUG: TImport::TTxProgress: OnAllocateResult: txId# 281474976710758, id# 281474976715662 2025-07-08T11:57:23.650885Z node 10 :IMPORT INFO: TImport::TTxProgress: ExecutePreparedQuery: info# { Id: 281474976715662 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 
DstPathName: '/Root/view' DstPathId: State: CreateSchemeObject SubState: Proposed WaitTxId: 0 Issue: '' }, txId# 281474976710758 2025-07-08T11:57:23.650902Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-07-08T11:57:23.651464Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-07-08T11:57:23.651469Z node 10 :IMPORT DEBUG: TImport::TTxProgress: OnModifyResult: txId# 281474976710758, status# StatusAccepted 2025-07-08T11:57:23.651489Z node 10 :IMPORT INFO: TImport::TTxProgress: Wait for completion: info# { Id: 281474976715662 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/view' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 4] State: CreateSchemeObject SubState: Subscribed WaitTxId: 281474976710758 Issue: '' } 2025-07-08T11:57:23.651799Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-07-08T11:57:23.653501Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-07-08T11:57:23.653510Z node 10 :IMPORT DEBUG: TImport::TTxProgress: OnNotifyResult: txId# 281474976710758 2025-07-08T11:57:23.653830Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-07-08T11:57:23.791961Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7524678949260012193:2312] [0] Resolve database: name# /Root 2025-07-08T11:57:23.792138Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7524678949260012193:2312] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-07-08T11:57:23.792143Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7524678949260012193:2312] [0] Send request: schemeShardId# 72057594046644480 2025-07-08T11:57:23.792553Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7524678949260012193:2312] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976715662 Status: SUCCESS Progress: PROGRESS_DONE ImportFromS3Settings { endpoint: "localhost:21667" scheme: HTTP bucket: "test_bucket" items { source_prefix: "view" destination_path: "/Root/view" } } StartTime { seconds: 1751975843 } EndTime { seconds: 1751975843 } } 2025-07-08T11:57:23.813024Z node 10 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:57:23.816990Z node 10 :TX_PROXY DEBUG: actor# [10:7524678944965043798:2135] Handle TEvExecuteKqpTransaction 2025-07-08T11:57:23.816997Z node 10 :TX_PROXY DEBUG: actor# [10:7524678944965043798:2135] TxId# 281474976715663 ProcessProposeKqpTransaction 2025-07-08T11:57:23.817165Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jzmybnyqbg82kgz7b0x25zmq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NWE4OWQyNmUtNjMzMTVkMWUtYjM1Y2MyODMtZWM5OTI5ODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root >> test.py::test[join-opt_on_opt_side--ForceBlocks] [GOOD] >> test.py::test[join-opt_on_opt_side--Results] >> test.py::test[aggregate-aggregate_with_default_yson_options-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_column_alias_reuse_for_join--Results] >> TGroupMapperTest::Mirror3dc ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD] Test command err: 2025-07-08T11:57:08.502048Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524678887612737712:2247];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000b24/r3tmp/tmp1BmLN0/pdisk_1.dat 2025-07-08T11:57:08.541936Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T11:57:08.587441Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:57:08.600204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:57:08.600228Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:57:08.603486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18433, node 1 2025-07-08T11:57:08.621525Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:57:08.621535Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:57:08.621536Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:57:08.621572Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12360 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:57:08.693871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
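Stepping back to the S3 view import that finishes just above (before the EIndexTypeGlobalVectorKmeansTree output begins): the scheme query compiled by TSchemeQueryExecutor is an ESchemeOpCreateView built from create_view.sql. A rough YQL equivalent, kept as a Python string; the query text and working directory come from the prepared query in the log, while the WITH clause is an assumption, since the original statement is not printed:

    # Reconstructed from the prepared scheme query logged above:
    #   WorkingDir: "/Root", CreateView { Name: "view",
    #   QueryText: "SELECT 1 AS Key UNION SELECT 2 AS Key UNION SELECT 3 AS Key" }
    # The WITH option below is assumed, not copied from the log.
    CREATE_VIEW_YQL = """
    CREATE VIEW `/Root/view` WITH (security_invoker = TRUE) AS
    SELECT 1 AS Key UNION SELECT 2 AS Key UNION SELECT 3 AS Key;
    """
    print(CREATE_VIEW_YQL)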
2025-07-08T11:57:09.047424Z node 1 :TX_PROXY DEBUG: actor# [1:7524678887612737534:2115] Handle TEvProposeTransaction 2025-07-08T11:57:09.047440Z node 1 :TX_PROXY DEBUG: actor# [1:7524678887612737534:2115] TxId# 281474976715658 ProcessProposeTransaction 2025-07-08T11:57:09.047450Z node 1 :TX_PROXY DEBUG: actor# [1:7524678887612737534:2115] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7524678891907705728:2592] 2025-07-08T11:57:09.059337Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678891907705728:2592] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateView FailedOnAlreadyExists: true CreateView { Name: "view" QueryText: "SELECT 42" CapturedContext { PathPrefix: "" SyntaxVersion: 1 AnsiLexer: false PgParser: false Pragmas: "AnsiInForEmptyOrNullableItemsCollections" Pragmas: "AnsiLike" Pragmas: "FlexibleTypes" Pragmas: "AnsiCurrentRow" Pragmas: "WarnOnAnsiAliasShadowing" Pragmas: "AnsiOptionalAs" Pragmas: "EmitAggApply" } } } } UserToken: "" DatabaseName: "" 2025-07-08T11:57:09.059357Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678891907705728:2592] txid# 281474976715658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-07-08T11:57:09.059458Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678891907705728:2592] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-07-08T11:57:09.059465Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678891907705728:2592] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-07-08T11:57:09.059481Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678891907705728:2592] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-07-08T11:57:09.059503Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678891907705728:2592] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-07-08T11:57:09.059512Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678891907705728:2592] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-07-08T11:57:09.059550Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678891907705728:2592] txid# 281474976715658 HANDLE EvClientConnected 2025-07-08T11:57:09.061522Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678891907705728:2592] txid# 281474976715658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715658} 2025-07-08T11:57:09.061543Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678891907705728:2592] txid# 281474976715658 SEND to# [1:7524678891907705727:2591] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 53} 2025-07-08T11:57:09.071024Z node 1 :TX_PROXY DEBUG: actor# [1:7524678887612737534:2115] Handle TEvNavigate describe path /Root/view 2025-07-08T11:57:09.071041Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678891907705754:2616] HANDLE EvNavigateScheme /Root/view 2025-07-08T11:57:09.071207Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678891907705754:2616] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-07-08T11:57:09.071225Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678891907705754:2616] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/view" 
2025-07-08T11:57:09.071358Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678891907705754:2616] Handle TEvDescribeSchemeResult Forward to# [1:7524678891907705753:2295] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 0 Record# Status: StatusSuccess Path: "/Root/view" PathDescription { Self { Name: "view" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeView CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1751975829114 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "view" PathId { OwnerId: 72057594046644480 LocalId: 2 } Version: 1 QueryText: "SELECT 42" CapturedContext { PathPrefix: "" SyntaxVersion: 1 AnsiLexer: false PgParser: false Pragmas: "AnsiInForEmptyOrNullableItemsCollections" Pragmas: "AnsiLike" Pragmas: "FlexibleTypes" Pragmas: "AnsiCurrentRow" Pragmas: "WarnOnAnsiAliasShadowing" Pragmas: "AnsiOptionalAs" Pragmas: "EmitAggApply" } } } PathId: 2 PathOwnerId: 72057594046644480 2025-07-08T11:57:09.079249Z node 1 :TX_PROXY DEBUG: actor# [1:7524678887612737534:2115] Handle TEvNavigate describe path /Root 2025-07-08T11:57:09.079259Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678891907705769:2621] HANDLE EvNavigateScheme /Root 2025-07-08T11:57:09.079299Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678891907705769:2621] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-07-08T11:57:09.079318Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678891907705769:2621] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root" Options { ShowPrivateTable: false } 2025-07-08T11:57:09.079438Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678891907705769:2621] Handle TEvDescribeSchemeResult Forward to# [1:7524678891907705767:2299] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 64 Record# Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1751975828764 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/Root:test" Kind: "test" } 
StoragePools { Name: "hdd" Kind: "hdd" } StoragePools { Name: "hdd1" Kind: "hdd1" } StoragePools { Name: "hdd2" Kind: "hdd2" } StoragePools { Name: "ssd" Kind: "ssd" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046644480 2025-07-08T11:57:09.080740Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7524678891907705771:2300] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } },{ Path: Root/view TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindView DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-07-08T11:57:09.080743Z node 1 :TX_PROXY DEBUG: [CreateExport] [1:7524678891907705771:2300] [0] Allocate txId 2025-07-08T11:57:09.080773Z node 1 :TX_PROXY DEBUG: actor# [1:75246788 ... 
node 16 :IMPORT DEBUG: TImport::TTxProgress: OnNotifyResult: txId# 281474976710762 2025-07-08T11:57:23.649905Z node 16 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-07-08T11:57:24.033189Z node 16 :TX_PROXY DEBUG: [GetImport] [16:7524678956719402222:2369] [0] Resolve database: name# /Root 2025-07-08T11:57:24.035686Z node 16 :TX_PROXY DEBUG: [GetImport] [16:7524678956719402222:2369] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-07-08T11:57:24.035704Z node 16 :TX_PROXY DEBUG: [GetImport] [16:7524678956719402222:2369] [0] Send request: schemeShardId# 72057594046644480 2025-07-08T11:57:24.036007Z node 16 :TX_PROXY DEBUG: [GetImport] [16:7524678956719402222:2369] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976715661 Status: SUCCESS Progress: PROGRESS_DONE ImportFromS3Settings { endpoint: "localhost:28966" scheme: HTTP bucket: "test_bucket" items { source_prefix: "table" destination_path: "/Root/table" } } StartTime { seconds: 1751975843 } EndTime { seconds: 1751975843 } } 2025-07-08T11:57:24.045095Z node 16 :TX_PROXY DEBUG: actor# [16:7524678948129464845:2139] Handle TEvNavigate describe path /Root/table 2025-07-08T11:57:24.045173Z node 16 :TX_PROXY DEBUG: Actor# [16:7524678956719402230:4380] HANDLE EvNavigateScheme /Root/table 2025-07-08T11:57:24.045441Z node 16 :TX_PROXY DEBUG: Actor# [16:7524678956719402230:4380] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-07-08T11:57:24.045464Z node 16 :TX_PROXY DEBUG: Actor# [16:7524678956719402230:4380] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/table" Options { ShowPrivateTable: false } 2025-07-08T11:57:24.045962Z node 16 :TX_PROXY DEBUG: Actor# [16:7524678956719402230:4380] Handle TEvDescribeSchemeResult Forward to# [16:7524678956719402226:2370] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/table" PathDescription { Self { Name: "table" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710760 CreateStep: 1751975843513 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "table" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Group" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 
InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } TableIndexes { Name: "value_idx" LocalPathId: 9 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "Value" SchemaVersion: 2 PathOwnerId: 72057594046644480 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 
ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } } VectorIndexKmeansTreeDescription { Settings { 
settings { metric: SIMILARITY_INNER_PRODUCT vector_type: VECTOR_TYPE_FLOAT vector_dimension: 768 } clusters: 80 levels: 2 } } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 8 PathOwnerId: 72057594046644480 >> test.py::test[expr-constraints_of--ForceBlocks] [GOOD] >> test.py::test[expr-constraints_of--Results] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeKesus [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExtSubDomain [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeFileStore [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeReplication >> test.py::test[select-exists_true-default.txt-Results] [GOOD] >> test.py::test[select-sampleselect-1000-Results] >> TGroupMapperTest::Mirror3dc [GOOD] >> BackupRestore::RestoreTablePartitioningSettings [GOOD] >> BackupRestore::RestoreIndexTablePartitioningSettings >> test.py::test[pg-tpcds-q36-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q65-default.txt-ForceBlocks] >> test.py::test[blocks-decimal_unary--ForceBlocks] [GOOD] >> test.py::test[blocks-decimal_unary--Results] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeCdcStream [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeBlobDepot [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeBackupCollection [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeInvalid [GOOD] |62.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::Mirror3dc [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobal [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalAsync >> test.py::test[aggregate-percentiles_grouped--Results] [GOOD] >> test.py::test[bigdate-table_yt_native-wo_compat-ForceBlocks] [SKIPPED] >> TGroupMapperTest::NonUniformCluster >> test.py::test[bigdate-table_yt_native-wo_compat-Results] [SKIPPED] >> test.py::test[binding-table_filter_strict_binding-default.txt-ForceBlocks] >> test.py::test[udf-udf_call_with_group_and_limit--Results] [GOOD] >> test.py::test[pg-tpcds-q38-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q38-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllIndexTypes-EIndexTypeInvalid [GOOD] Test command err: 2025-07-08T11:57:22.903441Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524678944923225416:2242];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:57:22.904353Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpszCBhK/pdisk_1.dat 2025-07-08T11:57:23.022930Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3324, node 1 2025-07-08T11:57:23.044204Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:57:23.044214Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:57:23.044215Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:57:23.044249Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14837 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-07-08T11:57:23.075349Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:57:23.075377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:57:23.081350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:57:23.097691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
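For reference against the DescribeSchemeResult a few lines above (TableIndexes value_idx, EIndexTypeGlobalVectorKmeansTree over column Value, metric SIMILARITY_INNER_PRODUCT, VECTOR_TYPE_FLOAT, dimension 768, clusters 80, levels 2): this is roughly the DDL such an index corresponds to. The parameter values mirror the log; the USING/WITH spelling is an assumption and should be checked against the YDB vector-index documentation:

    # Illustrative only: values copied from VectorIndexKmeansTreeDescription above,
    # clause names assumed rather than taken from the log.
    ADD_VECTOR_INDEX_YQL = """
    ALTER TABLE `/Root/table` ADD INDEX value_idx
    GLOBAL USING vector_kmeans_tree
    ON (Value)
    WITH (similarity=inner_product, vector_type="float",
          vector_dimension=768, clusters=80, levels=2);
    """
    print(ADD_VECTOR_INDEX_YQL)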
2025-07-08T11:57:23.105441Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmp0Y0Jxi/"Create temporary directory "/Root/~backup_20250708T115723" in databaseProcess "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmp0Y0Jxi/dir"Create directory "/Root/~backup_20250708T115723/dir" in databaseWrite ACL into "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmp0Y0Jxi/dir/permissions.pb"Remove directory "/Root/~backup_20250708T115723/dir"2025-07-08T11:57:23.248994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710661:0, at schemeshard: 72057594046644480 Remove temporary directory "/Root/~backup_20250708T115723" in database2025-07-08T11:57:23.277450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710662:0, at schemeshard: 72057594046644480 Backup completed successfully2025-07-08T11:57:23.285599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710663:0, at schemeshard: 72057594046644480 Restore "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmp0Y0Jxi/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmp0Y0Jxi/" to "/Root"Process "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmp0Y0Jxi/dir"Restore empty directory "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmp0Y0Jxi/dir" to "/Root/dir"Restore ACL "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmp0Y0Jxi/dir" to "/Root/dir"Read ACL from "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmp0Y0Jxi/dir/permissions.pb"2025-07-08T11:57:23.392471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-07-08T11:57:24.367240Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7524678957190782253:2080];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:57:24.367258Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpKVMIyp/pdisk_1.dat 2025-07-08T11:57:24.407183Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13499, node 4 2025-07-08T11:57:24.423444Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:57:24.423455Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:57:24.423458Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:57:24.423498Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2801 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:57:24.468058Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:57:24.468093Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:57:24.469979Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:57:24.490837Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:24.494053Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:57:24.910877Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T11:57:24.975320Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][4:7524678957190783462:2312] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:4:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-07-08T11:57:24.996522Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][4:7524678957190783737:2331] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:6:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-07-08T11:57:25.018674Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][4:7524678961485751341:2356] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:8:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } GetChangefeedAndTopicDescriptions: Backup "/Root" to "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/"Create temporary directory "/Root/~backup_20250708T115725" in databaseProcess "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250708T115725/table" }Backup table "/Root/~backup_20250708T115725/table" to "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/table"Describe table 
"/Root/~backup_20250708T115725/table"Write scheme into "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/table/scheme.pb"Describe table "/Root/table"Process "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/table/a"Write changefeed into "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/table/a/changefeed_description.pb"Write topic into "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/table/a/topic_description.pb"Process "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/table/b"Write changefeed into "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/table/b/changefeed_description.pb"Write topic into "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/table/b/topic_description.pb"Process "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/table/c"Write changefeed into "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/table/c/changefeed_description.pb"Write topic into "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/table/c/topic_description.pb"Write ACL into "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/table/permissions.pb"Read table "/Root/~backup_20250708T115725/table"Write data into "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/table/data_00.csv"Drop table "/Root/~backup_20250708T115725/table"2025-07-08T11:57:25.176695Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037895 not found Remove temporary directory "/Root/~backup_20250708T115725" in database2025-07-08T11:57:25.179176Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715667:0, at schemeshard: 72057594046644480 Backup completed successfully2025-07-08T11:57:25.184109Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715668:2, at schemeshard: 72057594046644480 2025-07-08T11:57:25.191484Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037894 not found 2025-07-08T11:57:25.191493Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037892 not found 2025-07-08T11:57:25.191501Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037891 not found 2025-07-08T11:57:25.191579Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037889 not found 2025-07-08T11:57:25.191598Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037890 not found 2025-07-08T11:57:25.191599Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037893 not found 2025-07-08T11:57:25.194991Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,5) wasn't found 2025-07-08T11:57:25.195011Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2025-07-08T11:57:25.195016Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,4) wasn't found 2025-07-08T11:57:25.195020Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,6) wasn't found 
2025-07-08T11:57:25.195024Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2025-07-08T11:57:25.195028Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,7) wasn't found Restore "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/" to "/Root"2025-07-08T11:57:25.208362Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037888 not found Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/" to "/Root"Process "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/table"Read scheme from "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/table" to "/Root/table"2025-07-08T11:57:25.234871Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/table/data_00.csv"Process "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/table/a"Read changefeed from "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/table/a/changefeed_description.pb"Read topic from "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/table/a/topic_description.pb"2025-07-08T11:57:25.294320Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037896:1][4:7524678961485752376:2431] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:13:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } Created "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/table/a"Process "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/table/c"Read changefeed from "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/table/c/changefeed_description.pb"Read topic from "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/table/c/topic_description.pb"2025-07-08T11:57:25.319029Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037896:1][4:7524678961485752614:2442] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:15:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } Created "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/table/c"Process "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/table/b"Read changefeed from "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/table/b/changefeed_description.pb"Read topic from "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/table/b/topic_description.pb"2025-07-08T11:57:25.348253Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037896:1][4:7524678961485752871:2456] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:17:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } Created "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/table/b"Restore ACL 
"/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/43nv/000a73/r3tmp/tmpdtraMe/table/permissions.pb"2025-07-08T11:57:25.353927Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 Restore completed successfully2025-07-08T11:57:25.374557Z node 4 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |62.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer >> BackupRestoreS3::PrefixedVectorIndex [GOOD] >> test.py::test[window-full/session--Results] [GOOD] >> test.py::test[union_all-inner_union_all_with_limits-default.txt-Results] >> test.py::test[flatten_by-flatten_with_resource--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_with_resource--Results] |62.3%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |62.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer >> test.py::test[window-win_func_lead_lag_opt--ForceBlocks] >> TGroupMapperTest::MapperSequentialCalls >> test.py::test[pg-tpcds-q58-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-constraints_of--Results] [GOOD] >> test.py::test[expr-empty_iterator--ForceBlocks] >> test.py::test[pg-tpcds-q58-default.txt-Results] >> TTxLocatorTest::TestZeroRange >> test.py::test[window-win_func_aggr_hist--ForceBlocks] [GOOD] >> test.py::test[window-win_func_aggr_hist--Results] >> test.py::test[flatten_by-flatten_with_subquery-default.txt-ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_with_subquery-default.txt-Results] >> TTxLocatorTest::TestZeroRange [GOOD] >> BackupRestore::RestoreIndexTablePartitioningSettings [GOOD] >> BackupRestore::RestoreIndexTableReadReplicasSettings ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::PrefixedVectorIndex [GOOD] Test command err: 2025-07-08T11:57:02.529237Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524678861688374368:2236];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:57:02.577834Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000b3d/r3tmp/tmpk0Sgm2/pdisk_1.dat 2025-07-08T11:57:02.681227Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8938, node 1 2025-07-08T11:57:02.751320Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:57:02.751331Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:57:02.751333Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:57:02.751370Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T11:57:02.835733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:57:02.835753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:57:02.843328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20246 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:57:02.889715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:02.909289Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:57:03.509075Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:57:03.677460Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715659. Ctx: { TraceId: 01jzmyb28p0yfrafpzyf8eyyvp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTY2YjliMDItNjZkZGM1NWMtMjZiYjQ0NTktOTllNjVhZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root Backup "/Root" to "/home/runner/.ya/build/build_root/43nv/000b3d/r3tmp/tmpheWEoX/"Create temporary directory "/Root/~backup_20250708T115703" in databaseProcess "/home/runner/.ya/build/build_root/43nv/000b3d/r3tmp/tmpheWEoX/view"Backup view "/Root/view" to "/home/runner/.ya/build/build_root/43nv/000b3d/r3tmp/tmpheWEoX/view"Write view into "/home/runner/.ya/build/build_root/43nv/000b3d/r3tmp/tmpheWEoX/view/create_view.sql"Write ACL into "/home/runner/.ya/build/build_root/43nv/000b3d/r3tmp/tmpheWEoX/view/permissions.pb"Remove temporary directory "/Root/~backup_20250708T115703" in database2025-07-08T11:57:03.846406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715661:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/43nv/000b3d/r3tmp/tmpheWEoX/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/43nv/000b3d/r3tmp/tmpheWEoX/" to "/Root"Process "/home/runner/.ya/build/build_root/43nv/000b3d/r3tmp/tmpheWEoX/view"Restore view "/home/runner/.ya/build/build_root/43nv/000b3d/r3tmp/tmpheWEoX/view" to "/Root/view"Read view from "/home/runner/.ya/build/build_root/43nv/000b3d/r3tmp/tmpheWEoX/view/create_view.sql"Created "/Root/view"Restore ACL "/home/runner/.ya/build/build_root/43nv/000b3d/r3tmp/tmpheWEoX/view" to "/Root/view"Read ACL from "/home/runner/.ya/build/build_root/43nv/000b3d/r3tmp/tmpheWEoX/view/permissions.pb"2025-07-08T11:57:03.991943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 Restore completed successfully2025-07-08T11:57:04.041239Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jzmyb2kzbgf5rhd0kz35kefh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTY2YjliMDItNjZkZGM1NWMtMjZiYjQ0NTktOTllNjVhZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T11:57:06.453187Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7524678879871599800:2080];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:57:06.453206Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000b3d/r3tmp/tmpyiCoSv/pdisk_1.dat 2025-07-08T11:57:06.697889Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:57:06.697913Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:57:06.706155Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:57:06.709639Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10130, node 4 2025-07-08T11:57:06.889977Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:57:06.889989Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:57:06.889991Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:57:06.890030Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21622 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:57:07.045400Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:57:07.457208Z node 4 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:57:07.645237Z node 4 :TX_PROXY DEBUG: actor# [4:7524678879871599881:2138] Handle TEvProposeTransaction 2025-07-08T11:57:07.645251Z node 4 :TX_PROXY DEBUG: actor# [4:7524678879871599881:2138] TxId# 281474976710658 ProcessProposeTransaction 2025-07-08T11:57:07.645264Z node 4 :TX_PROXY DEBUG: actor# [4:7524678879871599881:2138] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [4:7524678884166567997:2609] 2025-07-08T11:57:07.664311Z node 4 :TX_PROXY DEBUG: Actor# [4:7524678884166567997:2609] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Value" Type: "Utf8" NotNull: false } KeyColumnNames: "Key" PartitionConfig { PartitioningPolicy { MinPartitionsCount: 10 SplitByLoadSettings { Enabled: true } } } Temporary: false } } } UserToken: "" DatabaseName: "" 2025-07-08T11:57:07.664338Z node 4 :TX_PROXY DEBUG: Actor# [4:7524678884166567997:2609] txid# 281474976710658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-07-08T11:57:07.664505Z node 4 :TX_PROXY DEBUG: Actor# [4:7524678884166567997:2609] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-07-08T11:57:07.664551Z node 4 :TX_PROXY DEBUG: Actor# [4:7524678884166567997:2609] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-07-08T11:57:07.664613Z node 4 :TX_PROXY DEBUG: Actor# [4:7524678884166567997:2609] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-07-08T11:57:07.664641Z node 4 :TX_PROXY DEBUG: Actor# [4:7524678884166567997:2609] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-07-08T11:57:07.664650Z node 4 :TX_PROXY DEBUG: Actor# [4:7524678884166567997:2609] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-07-08T11:57:07.664694Z node 4 :TX_PROXY DEBUG: Actor# [4:7524678884166567997:2609] txid# 281474976710658 HANDLE EvClientConnected 2025-07-08T11:57:07.666320Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-07-08T11:57:07.673652Z node 4 :TX_PROXY DEBUG: Actor# [4:7524678884166567997:2609] txid# 281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-07-08T11:57:07.673673Z node 4 :TX_PROXY DEBUG: Actor# [4:7524678884166567997:2609] txid# 281474976710658 SEND to# [4:7524678884166567996:2297] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-07-08T11:57:07.754073Z node 4 :TX_PROXY DEBUG: actor# [4:7524678879871599881:2138] Handle TEvNavigate describe path /Root/table 2025-07-08T11:57:07.754093Z node 4 :TX_PROXY DEBUG: Actor# [4:7524678884166568136:2722] HANDLE EvNavigateScheme ... 
EvDescribeSchemeResult Forward to# [22:7524678965598170912:2388] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/table" PathDescription { Self { Name: "table" PathId: 9 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710760 CreateStep: 1751975845536 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "table" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Group" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } TableIndexes { Name: "value_idx" LocalPathId: 10 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "Group" KeyColumnNames: "Value" SchemaVersion: 2 PathOwnerId: 72057594046644480 
DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 
ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } } VectorIndexKmeansTreeDescription { Settings { 
settings { metric: SIMILARITY_INNER_PRODUCT vector_type: VECTOR_TYPE_FLOAT vector_dimension: 768 } clusters: 80 levels: 2 } } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 7 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 9 PathOwnerId: 72057594046644480 >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestZeroRange [GOOD] Test command err: 2025-07-08T11:57:27.520565Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-07-08T11:57:27.520628Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-07-08T11:57:27.520709Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-07-08T11:57:27.521075Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.521160Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-07-08T11:57:27.523126Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.523146Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.523156Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.523171Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-07-08T11:57:27.523191Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.523207Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-07-08T11:57:27.523231Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-07-08T11:57:27.523340Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:71:2106] requested range size#0 2025-07-08T11:57:27.523423Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.523431Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.523441Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 0 2025-07-08T11:57:27.523445Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:71:2106] TEvAllocateResult from# 0 to# 0 expected SUCCESS >> TCdcStreamTests::MeteringServerless [GOOD] >> TCdcStreamTests::MeteringDedicated >> test.py::test[blocks-decimal_unary--Results] [GOOD] >> test_generator.py::TestTpchGenerator::test_s1 [GOOD] >> test.py::test[join-pullup_inner--ForceBlocks] [GOOD] >> test.py::test[join-pullup_inner--Results] >> test.py::test[blocks-interval_add_interval--ForceBlocks] >> test.py::test[join-opt_on_opt_side--Results] [GOOD] >> test.py::test[join-premap_common_multiparents--ForceBlocks] >> TGroupMapperTest::MapperSequentialCalls [GOOD] |62.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator/ut/unittest >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalAsync [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalUnique [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree >> test.py::test[key_filter-calc_dependent-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-calc_dependent-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx [GOOD] Test command err: 2025-07-08T11:57:27.950448Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-07-08T11:57:27.950532Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-07-08T11:57:27.950661Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-07-08T11:57:27.951049Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.951136Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-07-08T11:57:27.953412Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.953431Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.953440Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.953457Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-07-08T11:57:27.953479Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.953496Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-07-08T11:57:27.953521Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-07-08T11:57:27.953744Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:79:2114] requested range size#100000 2025-07-08T11:57:27.953851Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:83:2118] requested range size#100000 2025-07-08T11:57:27.953884Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:81:2116] requested range size#100000 2025-07-08T11:57:27.953931Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:85:2120] requested range size#100000 2025-07-08T11:57:27.953950Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:87:2122] requested range size#100000 2025-07-08T11:57:27.953993Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.954012Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.954029Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:89:2124] requested range size#100000 2025-07-08T11:57:27.954049Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:73:2108] requested range size#100000 2025-07-08T11:57:27.954068Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.954088Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.954115Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.954127Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:75:2110] 
requested range size#100000 2025-07-08T11:57:27.954145Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.954164Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.954174Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:77:2112] requested range size#100000 2025-07-08T11:57:27.954200Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.954208Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.954217Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:71:2106] requested range size#100000 2025-07-08T11:57:27.954235Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.954250Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 100000 2025-07-08T11:57:27.954256Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:79:2114] TEvAllocateResult from# 0 to# 100000 2025-07-08T11:57:27.954276Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.954287Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 100000 Reserved to# 200000 2025-07-08T11:57:27.954291Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:83:2118] TEvAllocateResult from# 100000 to# 200000 2025-07-08T11:57:27.954300Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 200000 Reserved to# 300000 2025-07-08T11:57:27.954304Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:81:2116] TEvAllocateResult from# 200000 to# 300000 2025-07-08T11:57:27.954315Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.954328Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.954337Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 300000 Reserved to# 400000 2025-07-08T11:57:27.954341Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:85:2120] TEvAllocateResult from# 300000 to# 400000 2025-07-08T11:57:27.954351Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.954359Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.954366Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 400000 Reserved to# 500000 
2025-07-08T11:57:27.954370Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:87:2122] TEvAllocateResult from# 400000 to# 500000 2025-07-08T11:57:27.954382Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 500000 Reserved to# 600000 2025-07-08T11:57:27.954386Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:89:2124] TEvAllocateResult from# 500000 to# 600000 2025-07-08T11:57:27.954399Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.954409Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.954416Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 600000 Reserved to# 700000 2025-07-08T11:57:27.954420Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:73:2108] TEvAllocateResult from# 600000 to# 700000 2025-07-08T11:57:27.954437Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.954444Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 700000 Reserved to# 800000 2025-07-08T11:57:27.954449Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:75:2110] TEvAllocateResult from# 700000 to# 800000 2025-07-08T11:57:27.954463Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.954469Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 800000 Reserved to# 900000 2025-07-08T11:57:27.954473Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:77:2112] TEvAllocateResult from# 800000 to# 900000 2025-07-08T11:57:27.954486Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.954494Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 900000 Reserved to# 1000000 2025-07-08T11:57:27.954498Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:71:2106] TEvAllocateResult from# 900000 to# 1000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2025-07-08T11:57:27.955352Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:122:2156] requested range size#100000 2025-07-08T11:57:27.955422Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:118:2152] requested range size#100000 2025-07-08T11:57:27.955477Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.955489Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:120:2154] requested range size#100000 2025-07-08T11:57:27.955508Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:0:0:69:0] 
Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.955526Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:124:2158] requested range size#100000 2025-07-08T11:57:27.955557Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:126:2160] requested range size#100000 2025-07-08T11:57:27.955584Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.955604Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:128:2162] requested range size#100000 2025-07-08T11:57:27.955622Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.955636Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [720575940464476 ... 000 2025-07-08T11:57:27.963001Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:90:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.963012Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8300000 Reserved to# 8400000 2025-07-08T11:57:27.963015Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:398:2432] TEvAllocateResult from# 8300000 to# 8400000 2025-07-08T11:57:27.963025Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:90:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.963036Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8400000 Reserved to# 8500000 2025-07-08T11:57:27.963040Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:400:2434] TEvAllocateResult from# 8400000 to# 8500000 2025-07-08T11:57:27.963055Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:91:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.963063Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8500000 Reserved to# 8600000 2025-07-08T11:57:27.963067Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:402:2436] TEvAllocateResult from# 8500000 to# 8600000 2025-07-08T11:57:27.963078Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:91:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.963089Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8600000 Reserved to# 8700000 2025-07-08T11:57:27.963093Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:404:2438] TEvAllocateResult from# 8600000 to# 8700000 2025-07-08T11:57:27.963106Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:92:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.963114Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8700000 Reserved to# 8800000 2025-07-08T11:57:27.963118Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:406:2440] TEvAllocateResult from# 8700000 to# 8800000 2025-07-08T11:57:27.963126Z node 1 :TABLET_MAIN DEBUG: Put 
Result: TEvPutResult {Id# [72057594046447617:2:92:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.963133Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8800000 Reserved to# 8900000 2025-07-08T11:57:27.963137Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:408:2442] TEvAllocateResult from# 8800000 to# 8900000 2025-07-08T11:57:27.963160Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8900000 Reserved to# 9000000 2025-07-08T11:57:27.963165Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:410:2444] TEvAllocateResult from# 8900000 to# 9000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2025-07-08T11:57:27.963607Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:442:2476] requested range size#100000 2025-07-08T11:57:27.963668Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:444:2478] requested range size#100000 2025-07-08T11:57:27.963718Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:446:2480] requested range size#100000 2025-07-08T11:57:27.963734Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:93:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.963755Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:93:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.963778Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:448:2482] requested range size#100000 2025-07-08T11:57:27.963823Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:432:2466] requested range size#100000 2025-07-08T11:57:27.963840Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:94:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.963860Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:94:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.963874Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:450:2484] requested range size#100000 2025-07-08T11:57:27.963908Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:95:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.963923Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:434:2468] requested range size#100000 2025-07-08T11:57:27.963939Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:95:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.963961Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:96:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.963977Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:436:2470] requested range size#100000 2025-07-08T11:57:27.963992Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:96:0:0:71:0] Status# OK 
StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.964010Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:97:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.964028Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:438:2472] requested range size#100000 2025-07-08T11:57:27.964043Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:97:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.964061Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:98:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.964080Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9000000 Reserved to# 9100000 2025-07-08T11:57:27.964084Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:442:2476] TEvAllocateResult from# 9000000 to# 9100000 2025-07-08T11:57:27.964094Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:98:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.964110Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:440:2474] requested range size#100000 2025-07-08T11:57:27.964127Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:99:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.964143Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9100000 Reserved to# 9200000 2025-07-08T11:57:27.964147Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:444:2478] TEvAllocateResult from# 9100000 to# 9200000 2025-07-08T11:57:27.964156Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:99:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.964173Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9200000 Reserved to# 9300000 2025-07-08T11:57:27.964177Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:446:2480] TEvAllocateResult from# 9200000 to# 9300000 2025-07-08T11:57:27.964185Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:100:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.964198Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9300000 Reserved to# 9400000 2025-07-08T11:57:27.964202Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:448:2482] TEvAllocateResult from# 9300000 to# 9400000 2025-07-08T11:57:27.964210Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:100:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.964222Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9400000 Reserved to# 9500000 2025-07-08T11:57:27.964226Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:432:2466] TEvAllocateResult from# 9400000 to# 9500000 2025-07-08T11:57:27.964248Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9500000 Reserved 
to# 9600000 2025-07-08T11:57:27.964254Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:450:2484] TEvAllocateResult from# 9500000 to# 9600000 2025-07-08T11:57:27.964262Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:101:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.964275Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9600000 Reserved to# 9700000 2025-07-08T11:57:27.964278Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:434:2468] TEvAllocateResult from# 9600000 to# 9700000 2025-07-08T11:57:27.964285Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:101:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.964291Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:102:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.964305Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9700000 Reserved to# 9800000 2025-07-08T11:57:27.964308Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:436:2470] TEvAllocateResult from# 9700000 to# 9800000 2025-07-08T11:57:27.964315Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:102:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-07-08T11:57:27.964328Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9800000 Reserved to# 9900000 2025-07-08T11:57:27.964331Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:438:2472] TEvAllocateResult from# 9800000 to# 9900000 2025-07-08T11:57:27.964343Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9900000 Reserved to# 10000000 2025-07-08T11:57:27.964347Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:440:2474] TEvAllocateResult from# 9900000 to# 10000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS >> test.py::test[insert-select_relabel-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-select_relabel-default.txt-Results] |62.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |62.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |62.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |62.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MapperSequentialCalls [GOOD] >> test.py::test[like-like_clause-default.txt-Results] [GOOD] >> test.py::test[like-like_clause_no_pattern-default.txt-Results] >> test.py::test[pg-tpcds-q38-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_with_subquery-default.txt-Results] [GOOD] >> test.py::test[hor_join-runtime_dep-default.txt-ForceBlocks] >> test.py::test[flatten_by-flatten_with_resource--Results] [GOOD] >> test.py::test[hor_join-out_hor_join-default.txt-ForceBlocks] >> TContinuousBackupTests::TakeIncrementalBackup >> BackupRestore::RestoreIndexTableReadReplicasSettings [GOOD] >> 
BackupRestore::RestoreTableSplitBoundaries |62.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpchGenerator::test_s1 [GOOD] >> test.py::test[blocks-date_less_or_equal--Results] [GOOD] >> test.py::test[blocks-minmax_strings--ForceBlocks] >> test.py::test[table_range-merge_non_strict--ForceBlocks] [GOOD] >> test.py::test[table_range-merge_non_strict--Results] >> TContinuousBackupTests::TakeIncrementalBackup [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeReplication [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalTable >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD] >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true >> test.py::test[pg-tpcds-q58-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q87-default.txt-ForceBlocks] >> BackupRestore::RestoreTableSplitBoundaries [GOOD] >> BackupRestore::ImportDataShouldHandleErrors >> test.py::test[pg-tpcds-q49-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q77-default.txt-Results] >> test.py::test[distinct-distinct_star-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_star-default.txt-Results] |62.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TContinuousBackupTests::TakeIncrementalBackup [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:57:30.157171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:57:30.157200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:57:30.157205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:57:30.157210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:57:30.157215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:57:30.157218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:57:30.157226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:57:30.157238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:57:30.157318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:57:30.210319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:57:30.210337Z node 1 :IMPORT WARN: Table profiles 
were not loaded 2025-07-08T11:57:30.229084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:57:30.229148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:57:30.229316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:57:30.232348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:57:30.232398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:57:30.232618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:30.232896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:57:30.236119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:30.236162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:57:30.236619Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:57:30.236631Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:30.236648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:57:30.236656Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:57:30.236662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:57:30.236689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:57:30.238642Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:57:30.337091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:57:30.337168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:30.337227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:57:30.337270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:57:30.337281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:30.339669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:30.339705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: 
//MyRoot 2025-07-08T11:57:30.339759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:30.339771Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:57:30.339776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:57:30.339782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:57:30.344396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:30.344424Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:57:30.344432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:57:30.352031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:30.352057Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:30.352065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:30.352073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:57:30.352911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:57:30.353470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:57:30.353511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:57:30.353687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:30.353710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:57:30.353720Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:30.353797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:57:30.353806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:30.353835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:57:30.353847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant 
no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:57:30.354267Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:57:30.354274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:57:30.354313Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:30.354318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:57:30.354328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:30.354335Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:57:30.354346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:57:30.354350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:30.354355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:57:30.354358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:30.354362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:57:30.354368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:30.354372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:57:30.354376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:57:30.354386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:57:30.354392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:57:30.354396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:57:30.354835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:57:30.354851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
86233409546, at schemeshard: 72057594046678944 2025-07-08T11:57:31.042210Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:1 129 -> 240 2025-07-08T11:57:31.045527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:1, at schemeshard: 72057594046678944 2025-07-08T11:57:31.045616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:1, at schemeshard: 72057594046678944 2025-07-08T11:57:31.045678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:1, at schemeshard: 72057594046678944 2025-07-08T11:57:31.045686Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:1 ProgressState 2025-07-08T11:57:31.045698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:1 progress is 4/4 2025-07-08T11:57:31.045702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/4 2025-07-08T11:57:31.045706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:1 progress is 4/4 2025-07-08T11:57:31.045709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/4 2025-07-08T11:57:31.045713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/4, is published: true 2025-07-08T11:57:31.045729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:339:2318] message: TxId: 103 2025-07-08T11:57:31.045735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/4 2025-07-08T11:57:31.045742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-07-08T11:57:31.045746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-07-08T11:57:31.045756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-07-08T11:57:31.045760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1 2025-07-08T11:57:31.045762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2025-07-08T11:57:31.045774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-07-08T11:57:31.045777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:2 2025-07-08T11:57:31.045780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:2 2025-07-08T11:57:31.045787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-07-08T11:57:31.045790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:3 2025-07-08T11:57:31.045793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:3 2025-07-08T11:57:31.045799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-07-08T11:57:31.046480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-07-08T11:57:31.046488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:725:2629] TestWaitNotification: OK eventTxId 103 2025-07-08T11:57:31.046595Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: 
Path: "/MyRoot/IncrBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-07-08T11:57:31.046652Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl" took 63us result status StatusSuccess 2025-07-08T11:57:31.046760Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl" PathDescription { Self { Name: "IncrBackupImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:57:31.046817Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-07-08T11:57:31.046836Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl/streamImpl" took 21us result status StatusSuccess 2025-07-08T11:57:31.046922Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" PathDescription { 
Self { Name: "streamImpl" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409548 } PersQueueGroup { Name: "streamImpl" PathId: 4 TotalGroupCount: 1 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "continuousBackupImpl" TopicPath: "/MyRoot/Table/continuousBackupImpl/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 4 } MeteringMode: METERING_MODE_REQUEST_UNITS OffloadConfig { IncrementalBackup { DstPath: "/MyRoot/IncrBackupImpl" DstPathId { OwnerId: 72057594046678944 LocalId: 5 } } } } Partitions { PartitionId: 0 TabletId: 72075186233409547 Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409548 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:57:31.047241Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-07-08T11:57:31.047255Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl" took 16us result status StatusSuccess 2025-07-08T11:57:31.047304Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl" PathDescription { Self { Name: "IncrBackupImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 
LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |62.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/pytest >> test.py::test[pg-tpcds-q38-default.txt-Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD] Test command err: 2025-07-08T11:57:09.835413Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524678891170388454:2169];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:57:09.866289Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpXsulfR/pdisk_1.dat 2025-07-08T11:57:10.086049Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11413, node 1 2025-07-08T11:57:10.116247Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:57:10.116259Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:57:10.116260Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:57:10.116296Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30328 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:57:10.201109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:57:10.201131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:57:10.208415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:57:10.209305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:10.837127Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Backup "/Root" to "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmp1mLwxy/"Create temporary directory "/Root/~backup_20250708T115711" in databaseProcess "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmp1mLwxy/view"Backup view "/Root/view" to "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmp1mLwxy/view"Write view into "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmp1mLwxy/view/create_view.sql"Write ACL into "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmp1mLwxy/view/permissions.pb"Remove temporary directory "/Root/~backup_20250708T115711" in database2025-07-08T11:57:11.867462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715660:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmp1mLwxy/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmp1mLwxy/" to "/Root"Process "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmp1mLwxy/view"Restore view "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmp1mLwxy/view" to "/Root/view"Read view from "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmp1mLwxy/view/create_view.sql"Created "/Root/view"Restore ACL "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmp1mLwxy/view" to "/Root/view"Read ACL from "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmp1mLwxy/view/permissions.pb"2025-07-08T11:57:12.049304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-07-08T11:57:15.122054Z node 4 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7524678918360436884:2232];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:57:15.122100Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpvKvdpg/pdisk_1.dat 2025-07-08T11:57:15.241117Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:57:15.264183Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:57:15.264212Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:57:15.269670Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6149, node 4 2025-07-08T11:57:15.317181Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:57:15.317194Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:57:15.317196Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:57:15.317244Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27360 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:57:15.384330Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:15.397445Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:57:16.059253Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-07-08T11:57:16.121607Z node 4 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:57:16.250954Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715659. Ctx: { TraceId: 01jzmybej4dx3jb10c2m1510td, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=Njk4MGY3ZmQtMTEyZTBiNDYtMTY2MmY1OTItNDZmYjRkYWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T11:57:16.317551Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jzmybema95kz1phdmj8hrheb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=Njk4MGY3ZmQtMTEyZTBiNDYtMTY2MmY1OTItNDZmYjRkYWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root Backup "/Root" to "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpCialzc/"Create temporary directory "/Root/~backup_20250708T115716" in databaseProcess "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpCialzc/view"Backup view "/Root/view" to "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpCialzc/view"Write view into "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpCialzc/view/create_view.sql"Write ACL into "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpCialzc/view/permissions.pb"Process "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpCialzc/a"Create directory "/Root/~backup_20250708T115716/a" in databaseProcess "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpCialzc/a/b"Create directory "/Root/~backup_20250708T115716/a/b" in databaseProcess "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpCialzc/a/b/c"Create directory "/Root/~backup_20250708T115716/a/b/c" in databaseProcess "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpCialzc/a/b/c/table"Copy tables: { src: "/Root/a/b/c/table", dst: "/Root/~backup_20250708T115716/a/b/c/table" }Backup table "/Root/~backup_20250708T115716/a/b/c/table" to "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpCialzc/a/b/c/table"Describe table "/Root/~backup_20250708T115716/a/b/c/table"Write scheme into "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpCialzc/a/b/c/table/scheme.pb"Describe table "/Root/a/b/c/table"Write ACL into "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpCialzc/a/b/c/table/permissions.pb"Read table "/Root/~backup_20250708T115716/a/b/c/table"Write data into "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpCialzc/a/b/c/table/data_00.csv"Drop table "/Root/~backup_20250708T115716/a/b/c/table"Write ACL into "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpCialzc/a/b/c/permissions.pb"Remove directory "/Root/~backup_20250708T115716/a/b/c"2025-07-08T11:57:16.883079Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T11:57:16.890437Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037889 not found Write ACL into "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpCialzc/a/b/permissions.pb"Remove directory "/Root/~backup_20250708T115716/a/b"2025-07-08T11:57:16.963552Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715671:0, at schemeshard: 72057594046644480 Write ACL into "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpCialzc/a/permissions.pb"Remove directory "/Root/~backup_20250708T115716/a"2025-07-08T11:57:17.025901Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715672:0, at schemeshard: 72057594046644480 Remove temporary directory "/Root/~backup_20250708T115716" in database2025-07-08T11 ... 
WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpCAHvFF/"Create temporary directory "/Root/~backup_20250708T115727" in databaseProcess "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpCAHvFF/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250708T115727/table" }Backup table "/Root/~backup_20250708T115727/table" to "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpCAHvFF/table"Describe table "/Root/~backup_20250708T115727/table"Write scheme into "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpCAHvFF/table/scheme.pb"Describe table "/Root/table"Write ACL into "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpCAHvFF/table/permissions.pb"Read table "/Root/~backup_20250708T115727/table"Write data into "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpCAHvFF/table/data_00.csv"Drop table "/Root/~backup_20250708T115727/table"2025-07-08T11:57:27.394847Z node 16 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 16, TabletId: 72075186224037891 not found 2025-07-08T11:57:27.394863Z node 16 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 16, TabletId: 72075186224037890 not found Remove temporary directory "/Root/~backup_20250708T115727" in database2025-07-08T11:57:27.464261Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715664:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpCAHvFF/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpCAHvFF/" to "/Root"2025-07-08T11:57:27.536843Z node 16 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 16, TabletId: 72075186224037889 not found 2025-07-08T11:57:27.536855Z node 16 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 16, TabletId: 72075186224037888 not found Process "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpCAHvFF/table"Read scheme from "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpCAHvFF/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpCAHvFF/table" to "/Root/table"2025-07-08T11:57:27.543279Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpCAHvFF/table/data_00.csv"Restore index "byValue" on "/Root/table"2025-07-08T11:57:27.579963Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-07-08T11:57:27.611032Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-07-08T11:57:27.626030Z node 16 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Restore ACL 
"/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpCAHvFF/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpCAHvFF/table/permissions.pb"2025-07-08T11:57:27.725816Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715669:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-07-08T11:57:28.652093Z node 19 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[19:7524678972683178267:2232];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:57:28.652189Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpDto9l6/pdisk_1.dat 2025-07-08T11:57:28.850035Z node 19 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23326, node 19 2025-07-08T11:57:28.913762Z node 19 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:57:28.913774Z node 19 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:57:28.913776Z node 19 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:57:28.913825Z node 19 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1693 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-07-08T11:57:28.963374Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:57:28.963407Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:57:28.967835Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:57:28.977185Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-07-08T11:57:29.218293Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpGkFkvN/"Create temporary directory "/Root/~backup_20250708T115729" in databaseProcess "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpGkFkvN/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250708T115729/table" }Backup table "/Root/~backup_20250708T115729/table" to "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpGkFkvN/table"Describe table "/Root/~backup_20250708T115729/table"Write scheme into "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpGkFkvN/table/scheme.pb"Describe table "/Root/table"Write ACL into "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpGkFkvN/table/permissions.pb"Read table "/Root/~backup_20250708T115729/table"2025-07-08T11:57:29.649182Z node 19 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Write data into "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpGkFkvN/table/data_00.csv"Drop table "/Root/~backup_20250708T115729/table"2025-07-08T11:57:29.732423Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037891 not found 2025-07-08T11:57:29.732435Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037892 not found 2025-07-08T11:57:29.732437Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037893 not found Remove temporary directory "/Root/~backup_20250708T115729" in database2025-07-08T11:57:29.789567Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715664:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpGkFkvN/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpGkFkvN/" to "/Root"2025-07-08T11:57:29.852030Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037890 not found 2025-07-08T11:57:29.852044Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037888 not found 2025-07-08T11:57:29.852047Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037889 not found Process "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpGkFkvN/table"Read scheme from "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpGkFkvN/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpGkFkvN/table" to "/Root/table"2025-07-08T11:57:29.907304Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpGkFkvN/table/data_00.csv"Restore index "byValue" on "/Root/table"2025-07-08T11:57:30.126860Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-07-08T11:57:30.166690Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-07-08T11:57:30.198863Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710760:0, at schemeshard: 72057594046644480 2025-07-08T11:57:30.269840Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710762:0, at schemeshard: 72057594046644480 2025-07-08T11:57:30.314087Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037897 not found 2025-07-08T11:57:30.314098Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037898 not found Restore ACL "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpGkFkvN/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/43nv/000adb/r3tmp/tmpGkFkvN/table/permissions.pb"2025-07-08T11:57:30.434343Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715669:0, at schemeshard: 72057594046644480 Restore completed successfully >> TPDiskRaces::OwnerKilledWhileReadingLog [GOOD] >> TPDiskRaces::OwnerKilledWhileReadingLogAndThenKillLastOwner >> test.py::test[weak_field-weak_field_esc_yson--Results] [GOOD] >> test.py::test[weak_field-weak_field_rest--Results] >> test_generator.py::TestTpchGenerator::test_s1_state [GOOD] >> test.py::test[insert-select_relabel-default.txt-Results] [GOOD] >> test.py::test[insert-unique_distinct_hints--ForceBlocks] >> test.py::test[key_filter-calc_dependent-default.txt-Results] [GOOD] >> test.py::test[key_filter-dict_contains-default.txt-ForceBlocks] >> test.py::test[window-win_func_aggr_hist--Results] [GOOD] >> test.py::test[window-win_func_first_last_rev--ForceBlocks] >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalTable [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalDataSource |62.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_stream_creator/unittest |62.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_stream_creator/unittest |62.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |62.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> test.py::test[expr-empty_iterator--ForceBlocks] [GOOD] >> test.py::test[expr-empty_iterator--Results] >> test.py::test[pg-tpcds-q65-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-pullup_inner--Results] [GOOD] |62.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpchGenerator::test_s1_state [GOOD] >> BackupRestore::ImportDataShouldHandleErrors [GOOD] >> test.py::test[pg-tpcds-q65-default.txt-Results] >> test.py::test[union_all-inner_union_all_with_limits-default.txt-Results] [GOOD] >> 
test.py::test[view-init_view_after_eval-default.txt-Results] >> BackupRestore::RestoreKesusResources |62.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> test.py::test[table_range-merge_non_strict--Results] [GOOD] >> test.py::test[tpch-q13-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-07-08T11:57:32.408364Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524678990428473323:2235];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:57:32.515488Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001a49/r3tmp/tmpMo6vSd/pdisk_1.dat 2025-07-08T11:57:32.577320Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:57:32.625551Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:57:32.625587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:4999 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-07-08T11:57:32.629516Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-07-08T11:57:32.629571Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:57:32.635393Z node 1 :TX_PROXY DEBUG: actor# [1:7524678990428473141:2087] Handle TEvNavigate describe path dc-1 2025-07-08T11:57:32.635413Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678990428473813:2422] HANDLE EvNavigateScheme dc-1 2025-07-08T11:57:32.635442Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7524678990428473454:2157], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-07-08T11:57:32.635453Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7524678990428473454:2157], path# /dc-1, domainOwnerId# 72057594046644480 2025-07-08T11:57:32.635621Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7524678990428473814:2423][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-07-08T11:57:32.636056Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7524678990428473086:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7524678990428473820:2423] 2025-07-08T11:57:32.636081Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7524678990428473086:2052] Subscribe: subscriber# [1:7524678990428473820:2423], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-07-08T11:57:32.636098Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7524678990428473089:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7524678990428473821:2423] 2025-07-08T11:57:32.636103Z node 1 :SCHEME_BOARD_REPLICA INFO: 
[1:7524678990428473089:2055] Subscribe: subscriber# [1:7524678990428473821:2423], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-07-08T11:57:32.636116Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7524678990428473820:2423][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7524678990428473086:2052] 2025-07-08T11:57:32.636122Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7524678990428473821:2423][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7524678990428473089:2055] 2025-07-08T11:57:32.636133Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7524678990428473814:2423][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7524678990428473817:2423] 2025-07-08T11:57:32.636140Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7524678990428473814:2423][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7524678990428473818:2423] 2025-07-08T11:57:32.636150Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7524678990428473814:2423][/dc-1] Set up state: owner# [1:7524678990428473454:2157], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-07-08T11:57:32.636183Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7524678990428473819:2423][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7524678990428473816:2423], cookie# 1 2025-07-08T11:57:32.636186Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7524678990428473820:2423][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7524678990428473817:2423], cookie# 1 2025-07-08T11:57:32.636189Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7524678990428473821:2423][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7524678990428473818:2423], cookie# 1 2025-07-08T11:57:32.636195Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7524678990428473086:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7524678990428473820:2423] 2025-07-08T11:57:32.636198Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7524678990428473086:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7524678990428473820:2423], cookie# 1 2025-07-08T11:57:32.636203Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7524678990428473089:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7524678990428473821:2423] 2025-07-08T11:57:32.636205Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7524678990428473089:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7524678990428473821:2423], cookie# 1 2025-07-08T11:57:32.637043Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7524678990428473083:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7524678990428473819:2423] 2025-07-08T11:57:32.637059Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7524678990428473083:2049] Subscribe: subscriber# [1:7524678990428473819:2423], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-07-08T11:57:32.637075Z node 1 :SCHEME_BOARD_REPLICA 
DEBUG: [1:7524678990428473083:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7524678990428473819:2423], cookie# 1 2025-07-08T11:57:32.637089Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7524678990428473820:2423][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7524678990428473086:2052], cookie# 1 2025-07-08T11:57:32.637094Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7524678990428473821:2423][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7524678990428473089:2055], cookie# 1 2025-07-08T11:57:32.637100Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7524678990428473819:2423][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7524678990428473083:2049] 2025-07-08T11:57:32.637104Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7524678990428473819:2423][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7524678990428473083:2049], cookie# 1 2025-07-08T11:57:32.637111Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7524678990428473814:2423][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7524678990428473817:2423], cookie# 1 2025-07-08T11:57:32.637117Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7524678990428473814:2423][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-07-08T11:57:32.637120Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7524678990428473814:2423][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7524678990428473818:2423], cookie# 1 2025-07-08T11:57:32.637123Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7524678990428473814:2423][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-07-08T11:57:32.637128Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7524678990428473814:2423][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7524678990428473816:2423] 2025-07-08T11:57:32.637138Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7524678990428473814:2423][/dc-1] Path was already updated: owner# [1:7524678990428473454:2157], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-07-08T11:57:32.637142Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7524678990428473814:2423][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7524678990428473816:2423], cookie# 1 2025-07-08T11:57:32.637143Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7524678990428473814:2423][/dc-1] Unexpected sync response: sender# [1:7524678990428473816:2423], cookie# 1 2025-07-08T11:57:32.637199Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7524678990428473083:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7524678990428473819:2423] 2025-07-08T11:57:32.646639Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7524678990428473454:2157], notify# 
NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-07-08T11:57:32.646739Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7524678990428473454:2157], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 ... 
:2423] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1751975852732 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-07-08T11:57:33.387311Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7524678990428473454:2157], cacheItem# { Subscriber: { Subscriber: [1:7524678990428473814:2423] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1751975852732 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-07-08T11:57:33.387335Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7524678994723441438:2651], recipient# [1:7524678994723441437:2650], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-07-08T11:57:33.387338Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678994723441437:2650] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-07-08T11:57:33.387351Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678994723441437:2650] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-07-08T11:57:33.387902Z node 1 :TX_PROXY DEBUG: Actor# [1:7524678994723441437:2650] Handle TEvDescribeSchemeResult Forward to# [1:7524678994723441436:2649] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1751975852732 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 
PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 2025-07-08T11:57:33.401009Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:57:33.521014Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7524678990428473454:2157], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-07-08T11:57:33.521045Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7524678990428473454:2157], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2025-07-08T11:57:33.521210Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7524678994723441442:2654][/dc-1/.metadata/initialization/migrations] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-07-08T11:57:33.521267Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7524678990428473089:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7524678994723441448:2654] 2025-07-08T11:57:33.521271Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7524678990428473089:2055] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-07-08T11:57:33.521289Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7524678990428473089:2055] Subscribe: subscriber# [1:7524678994723441448:2654], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-07-08T11:57:33.521302Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7524678994723441448:2654][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7524678990428473089:2055] 2025-07-08T11:57:33.521308Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7524678994723441442:2654][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7524678994723441445:2654] 2025-07-08T11:57:33.521320Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7524678990428473089:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7524678994723441448:2654] 2025-07-08T11:57:33.521324Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7524678990428473083:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7524678994723441446:2654] 2025-07-08T11:57:33.521325Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7524678990428473083:2049] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-07-08T11:57:33.521329Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7524678990428473083:2049] Subscribe: subscriber# [1:7524678994723441446:2654], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-07-08T11:57:33.521334Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7524678990428473086:2052] Handle 
NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7524678994723441447:2654] 2025-07-08T11:57:33.521336Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7524678990428473086:2052] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-07-08T11:57:33.521340Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7524678990428473086:2052] Subscribe: subscriber# [1:7524678994723441447:2654], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-07-08T11:57:33.521345Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7524678994723441446:2654][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7524678990428473083:2049] 2025-07-08T11:57:33.521348Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7524678994723441447:2654][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7524678990428473086:2052] 2025-07-08T11:57:33.521352Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7524678994723441442:2654][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7524678994723441443:2654] 2025-07-08T11:57:33.521358Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7524678994723441442:2654][/dc-1/.metadata/initialization/migrations] Set up state: owner# [1:7524678990428473454:2157], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-07-08T11:57:33.521362Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7524678994723441442:2654][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7524678994723441444:2654] 2025-07-08T11:57:33.521367Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7524678994723441442:2654][/dc-1/.metadata/initialization/migrations] Ignore empty state: owner# [1:7524678990428473454:2157], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-07-08T11:57:33.521374Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7524678990428473454:2157], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 } 2025-07-08T11:57:33.521387Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7524678990428473454:2157], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7524678994723441442:2654] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-07-08T11:57:33.521403Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7524678990428473454:2157], cacheItem# { Subscriber: { Subscriber: [1:7524678994723441442:2654] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 
SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-07-08T11:57:33.521416Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7524678994723441449:2655], recipient# [1:7524678994723441441:2276], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-07-08T11:57:33.521430Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7524678990428473083:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7524678994723441446:2654] 2025-07-08T11:57:33.521433Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7524678990428473086:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7524678994723441447:2654] >> test.py::test[binding-table_filter_strict_binding-default.txt-ForceBlocks] [GOOD] >> test.py::test[binding-table_filter_strict_binding-default.txt-Results] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalDataSource [GOOD] >> LocalTableWriter::ConsistentWrite >> test.py::test[select-sampleselect-1000-Results] [GOOD] >> test.py::test[select-select_all_filtered-default.txt-Results] >> LocalTableWriter::WriteTable |62.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_stream_creator/unittest |62.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/pytest >> test.py::test[join-pullup_inner--Results] [GOOD] >> test.py::test[aggregate-group_by_column_alias_reuse_for_join--Results] [GOOD] >> test.py::test[aggregate-group_by_cube_expr_trio--Results] >> LocalTableWriter::WriteTable [GOOD] >> LocalTableWriter::ConsistentWrite [GOOD] >> BackupRestore::RestoreKesusResources [GOOD] >> BackupRestore::RestoreReplicationWithoutSecret ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalDataSource [GOOD] Test command err: 2025-07-08T11:57:22.712100Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524678944952338504:2231];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:57:22.712145Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmppZYXzF/pdisk_1.dat 2025-07-08T11:57:22.845633Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:57:22.858007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:57:22.858029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:57:22.859443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12280, node 1 2025-07-08T11:57:22.911009Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2025-07-08T11:57:22.911028Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:57:22.911030Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:57:22.911086Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20568 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:57:22.962582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:22.972195Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpo6muHw/"Create temporary directory "/Root/~backup_20250708T115723" in databaseProcess "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpo6muHw/topic"Backup topic "/Root/topic" to "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpo6muHw/topic"Write topic into "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpo6muHw/topic/create_topic.pb"Write ACL into "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpo6muHw/topic/permissions.pb"Remove temporary directory "/Root/~backup_20250708T115723" in database2025-07-08T11:57:23.512373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715660:0, at schemeshard: 72057594046644480 Backup completed successfully2025-07-08T11:57:23.518153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T11:57:23.522437Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-07-08T11:57:23.522454Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-07-08T11:57:23.522456Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-07-08T11:57:23.530761Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2025-07-08T11:57:23.530790Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found 
2025-07-08T11:57:23.530794Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found Restore "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpo6muHw/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpo6muHw/" to "/Root"Process "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpo6muHw/topic"Restore topic "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpo6muHw/topic" to "/Root/topic"Read topic from "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpo6muHw/topic/create_topic.pb"Created "/Root/topic"Restore ACL "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpo6muHw/topic" to "/Root/topic"Read ACL from "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpo6muHw/topic/permissions.pb"2025-07-08T11:57:23.568530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480 Restore completed successfully2025-07-08T11:57:23.713459Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:57:24.499926Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7524678954098587487:2231];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:57:24.500041Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmp5BYFWC/pdisk_1.dat 2025-07-08T11:57:24.579562Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23130, node 4 2025-07-08T11:57:24.601156Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:57:24.601170Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:57:24.601172Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:57:24.601225Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T11:57:24.601343Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:57:24.601361Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:57:24.606257Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10358 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:57:24.642157Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:24.645755Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:57:24.669379Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpxdOS9k/"Create temporary directory "/Root/~backup_20250708T115724" in databaseProcess "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpxdOS9k/kesus"Backup coordination node "/Root/kesus" to "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpxdOS9k/kesus"Write coordination node into "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpxdOS9k/kesus/create_coordination_node.pb"Write ACL into "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpxdOS9k/kesus/permissions.pb"Remove temporary directory "/Root/~backup_20250708T115724" in database2025-07-08T11:57:24.989502Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T11:57:24.993708Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 Backup completed successfully2025-07-08T11:57:24.995831Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropKesus, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T11:57:25.004556Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037888 not found 2025-07-08T11:57:25.006075Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found Restore "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpxdOS9k/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpxdOS9k/" to "/Root"Process "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpxdOS9k/kesus"Restore coordination node "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpxdOS9k/kesus" to "/Root/kesus"Read coordination node from 
"/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpxdOS9k/kesus/create_coordination_node.pb"2025-07-08T11:57:25.009355Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715662:0, at schemeshard: 72057594046644480 Restore coordination node's resources "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpxdOS9k/kesus" to "/Root/kesus"Created "/Root/kesus"Restore ACL "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpxdOS9k/kesus" to "/Root/kesus"Read ACL from "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpxdOS9k/kesus/permissi ... N: will try to initialize from file: (empty maybe) 2025-07-08T11:57:32.293251Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:57:32.293304Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23117 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:57:32.353409Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:57:32.361144Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:57:32.730595Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T11:57:32.736282Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpiYxpmJ/"Create temporary directory "/Root/~backup_20250708T115732" in databaseProcess "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpiYxpmJ/externalTable"Backup external table "/Root/externalTable" to "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpiYxpmJ/externalTable"Write external table into "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpiYxpmJ/externalTable/create_external_table.sql"Write ACL into "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpiYxpmJ/externalTable/permissions.pb"Process "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpiYxpmJ/externalDataSource"Backup external data source "/Root/externalDataSource" to "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpiYxpmJ/externalDataSource"Write external data source into "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpiYxpmJ/externalDataSource/create_external_data_source.sql"Write ACL into "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpiYxpmJ/externalDataSource/permissions.pb"Remove temporary directory "/Root/~backup_20250708T115732" in database2025-07-08T11:57:33.009457Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715661:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpiYxpmJ/" to "/Root"Resolved db base path: "/Root"2025-07-08T11:57:33.080376Z node 10 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Restore folder "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpiYxpmJ/" to "/Root"Process "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpiYxpmJ/externalDataSource"Restore external data source "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpiYxpmJ/externalDataSource" to "/Root/externalDataSource"Read external data source from "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpiYxpmJ/externalDataSource/create_external_data_source.sql"2025-07-08T11:57:33.085856Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715664:0, at schemeshard: 72057594046644480 Created "/Root/externalDataSource"Restore ACL "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpiYxpmJ/externalDataSource" to "/Root/externalDataSource"Read ACL from "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpiYxpmJ/externalDataSource/permissions.pb"2025-07-08T11:57:33.113300Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480 Process 
"/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpiYxpmJ/externalTable"Restore external table "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpiYxpmJ/externalTable" to "/Root/externalTable"Read external table from "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpiYxpmJ/externalTable/create_external_table.sql"2025-07-08T11:57:33.121149Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 Created "/Root/externalTable"Restore ACL "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpiYxpmJ/externalTable" to "/Root/externalTable"Read ACL from "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpiYxpmJ/externalTable/permissions.pb"2025-07-08T11:57:33.145295Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715667:0, at schemeshard: 72057594046644480 Restore completed successfully test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpNKnVYQ/pdisk_1.dat 2025-07-08T11:57:33.988851Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7524678994428643839:2089];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:57:33.990325Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T11:57:34.044122Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22119, node 13 2025-07-08T11:57:34.089278Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:57:34.089289Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:57:34.089290Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:57:34.089335Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T11:57:34.101632Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:57:34.101658Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:25635 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-07-08T11:57:34.121560Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-07-08T11:57:34.148965Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:34.745492Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715658:0, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpLoSfiU/"Create temporary directory "/Root/~backup_20250708T115734" in database2025-07-08T11:57:34.804840Z node 13 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 Process "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpLoSfiU/externalDataSource"Backup external data source "/Root/externalDataSource" to "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpLoSfiU/externalDataSource"Write external data source into "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpLoSfiU/externalDataSource/create_external_data_source.sql"Write ACL into "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpLoSfiU/externalDataSource/permissions.pb"Remove temporary directory "/Root/~backup_20250708T115734" in database2025-07-08T11:57:34.866232Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715660:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpLoSfiU/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpLoSfiU/" to "/Root"Process "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpLoSfiU/externalDataSource"Restore external data source "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpLoSfiU/externalDataSource" to "/Root/externalDataSource"Read external data source from "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpLoSfiU/externalDataSource/create_external_data_source.sql"2025-07-08T11:57:34.990591Z node 13 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:57:35.034204Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715662:0, at schemeshard: 72057594046644480 Created "/Root/externalDataSource"Restore ACL "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpLoSfiU/externalDataSource" to "/Root/externalDataSource"Read ACL from "/home/runner/.ya/build/build_root/43nv/000a8b/r3tmp/tmpLoSfiU/externalDataSource/permissions.pb"2025-07-08T11:57:35.060666Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480 Restore completed successfully >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain >> LocalTableWriter::DataAlongWithHeartbeat >> test.py::test[blocks-interval_add_interval--ForceBlocks] [GOOD] >> test.py::test[blocks-interval_add_interval--Results] |62.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |62.4%| [LD] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |62.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |62.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |62.4%| [LD] {RESULT} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-true >> test.py::test[like-like_clause_no_pattern-default.txt-Results] [GOOD] >> test.py::test[limit-empty_sort_after_limit-default.txt-Results] |62.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |62.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest >> TSchemeShardExtSubDomainTest::CreateItemsInsideExtSubdomainAtGSSwithoutTSS ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::ConsistentWrite [GOOD] Test command err: 2025-07-08T11:57:35.657086Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679003207030742:2081];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:57:35.658792Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001c4e/r3tmp/tmp8kxg9z/pdisk_1.dat 2025-07-08T11:57:35.758299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:57:35.758324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:57:35.759372Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:57:35.761384Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:22962 TServer::EnableGrpc on GrpcPort 19615, node 1 2025-07-08T11:57:35.885434Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:57:35.885451Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:57:35.885453Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:57:35.885580Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22962 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:57:35.991684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:36.001544Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:57:36.049905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1751975856127 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) 2025-07-08T11:57:36.086201Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679007501998653:2347] Handshake: worker# [1:7524679007501998563:2288] 2025-07-08T11:57:36.086280Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679007501998653:2347] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-07-08T11:57:36.086316Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679007501998653:2347] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-07-08T11:57:36.086322Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679007501998653:2347] Send handshake: worker# [1:7524679007501998563:2288] 
2025-07-08T11:57:36.086503Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679007501998653:2347] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-07-08T11:57:36.087413Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679007501998653:2347] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2025-07-08T11:57:36.087446Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679007501998653:2347] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 },{ Order: 2 BodySize: 48 },{ Order: 3 BodySize: 48 }] } 2025-07-08T11:57:36.087482Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7524679007501998657:2347] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-07-08T11:57:36.087487Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679007501998653:2347] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-07-08T11:57:36.087502Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7524679007501998657:2347] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 2 Group: 0 Step: 2 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 3 Group: 0 Step: 3 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-07-08T11:57:36.088349Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7524679007501998657:2347] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-07-08T11:57:36.088365Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679007501998653:2347] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-07-08T11:57:36.088373Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679007501998653:2347] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } 2025-07-08T11:57:36.088467Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679007501998653:2347] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 19b Offset: 4 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-07-08T11:57:36.088540Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679007501998653:2347] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 49b Offset: 5 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: 
ProducerId: },{ Codec: RAW Data: 49b Offset: 6 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 7 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 8 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-07-08T11:57:36.088617Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679007501998653:2347] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } VersionTxIds { Version { Step: 30 TxId: 0 } TxId: 3 } 2025-07-08T11:57:36.088638Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679007501998653:2347] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 5 BodySize: 49 },{ Order: 6 BodySize: 49 },{ Order: 7 BodySize: 49 },{ Order: 8 BodySize: 49 }] } 2025-07-08T11:57:36.088657Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7524679007501998657:2347] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 5 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 6 Group: 0 Step: 12 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 7 Group: 0 Step: 21 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 8 Group: 0 Step: 22 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2025-07-08T11:57:36.090503Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7524679007501998657:2347] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-07-08T11:57:36.090517Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679007501998653:2347] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-07-08T11:57:36.090523Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679007501998653:2347] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [5,6,7,8] } 2025-07-08T11:57:36.090616Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679007501998653:2347] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 49b Offset: 9 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 10 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-07-08T11:57:36.090638Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679007501998653:2347] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 9 BodySize: 49 },{ Order: 10 BodySize: 49 }] } 2025-07-08T11:57:36.090653Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7524679007501998657:2347] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 9 Group: 0 Step: 13 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 10 Group: 0 Step: 23 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2025-07-08T11:57:36.092180Z node 1 :REPLICATION_SERVICE 
DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7524679007501998657:2347] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-07-08T11:57:36.092192Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679007501998653:2347] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-07-08T11:57:36.092198Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679007501998653:2347] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [9,10] } 2025-07-08T11:57:36.092279Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679007501998653:2347] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 19b Offset: 11 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-false >> test.py::test[pg-tpcds-q65-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q72-default.txt-ForceBlocks] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::WriteTable [GOOD] Test command err: 2025-07-08T11:57:35.763359Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679000896404192:2223];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:57:35.782917Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001c44/r3tmp/tmpQ2Iorb/pdisk_1.dat 2025-07-08T11:57:35.808489Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:57:35.829130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:57:35.829165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:12938 TServer::EnableGrpc on GrpcPort 7841, node 1 2025-07-08T11:57:35.830224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:57:35.850814Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:57:35.850843Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:57:35.850844Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:57:35.850908Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12938 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T11:57:35.975536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:57:35.978021Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:57:35.978894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1751975856085 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) 2025-07-08T11:57:36.050470Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679005191371946:2346] Handshake: worker# [1:7524679000896404558:2286] 2025-07-08T11:57:36.050535Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679005191371946:2346] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-07-08T11:57:36.050584Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679005191371946:2346] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-07-08T11:57:36.050592Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679005191371946:2346] Send handshake: worker# [1:7524679000896404558:2286] 2025-07-08T11:57:36.050727Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679005191371946:2346] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 36b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 36b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-07-08T11:57:36.050763Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679005191371946:2346] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 36 },{ Order: 2 BodySize: 36 },{ Order: 3 BodySize: 36 }] } 2025-07-08T11:57:36.050798Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7524679005191371949:2346] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-07-08T11:57:36.050803Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679005191371946:2346] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-07-08T11:57:36.050817Z node 1 :REPLICATION_SERVICE DEBUG: 
[TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7524679005191371949:2346] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-07-08T11:57:36.057242Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7524679005191371949:2346] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-07-08T11:57:36.057268Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679005191371946:2346] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-07-08T11:57:36.057280Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679005191371946:2346] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } >> TSchemeShardExtSubDomainTest::CreateItemsInsideExtSubdomainAtGSSwithoutTSS [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-false >> test_generator.py::TestTpchGenerator::test_s1_state_and_parts [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-false >> test.py::test[expr-empty_iterator--Results] [GOOD] >> test.py::test[binding-table_filter_strict_binding-default.txt-Results] [GOOD] >> test.py::test[blocks-add_int64--ForceBlocks] >> TSchemeShardExtSubDomainTest::Fake [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed >> LocalTableWriter::DataAlongWithHeartbeat [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::Create >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-ExternalHive >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed [GOOD] >> test.py::test[window-win_func_lead_lag_opt--ForceBlocks] [GOOD] >> test.py::test[window-win_func_lead_lag_opt--Results] >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-false ------- [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::DataAlongWithHeartbeat [GOOD] Test command err: 2025-07-08T11:57:37.332575Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679012471816851:2233];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:57:37.332675Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001c26/r3tmp/tmpBnMXLe/pdisk_1.dat 2025-07-08T11:57:37.489209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:57:37.489240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:57:37.490704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:57:37.493190Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:12299 TServer::EnableGrpc on GrpcPort 64707, node 1 2025-07-08T11:57:37.603696Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:57:37.603714Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:57:37.603715Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:57:37.603764Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12299 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:57:37.668725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:37.779923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1751975857891 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) 2025-07-08T11:57:37.862637Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679012471817304:2347] Handshake: worker# [1:7524679012471817305:2348] 2025-07-08T11:57:37.862714Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679012471817304:2347] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-07-08T11:57:37.862757Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679012471817304:2347] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-07-08T11:57:37.862764Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679012471817304:2347] Send handshake: worker# [1:7524679012471817305:2348] 2025-07-08T11:57:37.862940Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679012471817304:2347] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 19b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-07-08T11:57:37.863877Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679012471817304:2347] Handle NKikimrReplication.TEvTxIdResult VersionTxIds 
{ Version { Step: 10 TxId: 0 } TxId: 1 } 2025-07-08T11:57:37.863906Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679012471817304:2347] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2025-07-08T11:57:37.863951Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7524679012471817308:2347] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-07-08T11:57:37.863956Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679012471817304:2347] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-07-08T11:57:37.863969Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7524679012471817308:2347] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-07-08T11:57:37.865381Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7524679012471817308:2347] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-07-08T11:57:37.865396Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679012471817304:2347] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-07-08T11:57:37.865404Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679012471817304:2347] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-false >> test.py::test[hor_join-runtime_dep-default.txt-ForceBlocks] [GOOD] >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst >> test.py::test[join-premap_common_multiparents--ForceBlocks] [GOOD] >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::Drop |62.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part6/pytest >> test.py::test[expr-empty_iterator--Results] [GOOD] |62.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpchGenerator::test_s1_state_and_parts [GOOD] >> TSchemeShardExtSubDomainTest::Create [GOOD] >> 
TSchemeShardExtSubDomainTest::CreateAndAlter >> test.py::test[pg-tpcds-q87-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q87-default.txt-Results] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx >> test.py::test[distinct-distinct_star-default.txt-Results] [GOOD] >> test.py::test[dq-precompute_result-default.txt-ForceBlocks] >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive >> test.py::test[blocks-interval_add_interval--Results] [GOOD] >> test.py::test[blocks-interval_mul--ForceBlocks] >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateAndWait >> TSchemeShardExtSubDomainTest::Drop [GOOD] >> TSchemeShardExtSubDomainTest::Drop-ExternalHive >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlter [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-ExternalHive >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true |62.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest |62.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[hor_join-runtime_dep-default.txt-ForceBlocks] [GOOD] >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst 
>> TSchemeShardExtSubDomainTest::CreateAndWait [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-false |62.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/pytest >> test.py::test[join-premap_common_multiparents--ForceBlocks] [GOOD] >> TSchemeShardExtSubDomainTest::Drop-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst >> test.py::test[hor_join-out_hor_join-default.txt-ForceBlocks] [GOOD] >> test.py::test[hor_join-out_hor_join-default.txt-Results] >> TSchemeShardExtSubDomainTest::CreateAndAlter-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:57:37.611543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:57:37.611570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:57:37.611575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:57:37.611580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:57:37.611584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:57:37.611588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:57:37.611595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:57:37.611606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:57:37.611690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:57:37.623890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:57:37.623910Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:57:37.627608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:57:37.627663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:57:37.627714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
2025-07-08T11:57:37.628971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:57:37.629013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:57:37.629120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:37.629279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:57:37.629971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:37.630021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:57:37.630249Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:57:37.630258Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:37.630274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:57:37.630281Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:57:37.630287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:57:37.630313Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:57:37.631511Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:57:37.649862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:57:37.649942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:37.650010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:57:37.650067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:57:37.650079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:37.650903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:37.650931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:57:37.650980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:37.650990Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 
2025-07-08T11:57:37.650996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:57:37.651001Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:57:37.651365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:37.651376Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:57:37.651381Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:57:37.651680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:37.651690Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:37.651696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:37.651703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:57:37.652272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:57:37.652603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:57:37.652642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:57:37.652819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:37.652840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:57:37.652848Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:37.652908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:57:37.652916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:37.652942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:57:37.652969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:57:37.653319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:57:37.653326Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:57:37.653367Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:37.653372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:57:37.653383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:37.653389Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:57:37.653399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:57:37.653403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:37.653408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:57:37.653411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:37.653415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:57:37.653420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:37.653425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:57:37.653429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:57:37.653438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:57:37.653443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:57:37.653447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:57:37.653811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:57:37.653826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
gePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:57:41.006695Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:41.006731Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:57:41.006765Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:57:41.006771Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:41.007127Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:41.007147Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:57:41.007170Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:41.007178Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:57:41.007182Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:57:41.007186Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:57:41.007451Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:41.007473Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:57:41.007478Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:57:41.007763Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:41.007773Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:41.007779Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:41.007785Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:57:41.007851Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:57:41.008133Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:57:41.008163Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: 
minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:57:41.008282Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:41.008300Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 30064773230 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:57:41.008308Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:41.008357Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:57:41.008363Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:41.008385Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:57:41.008394Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:57:41.008695Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:57:41.008703Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:57:41.008727Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:41.008731Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:57:41.008785Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:41.008791Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:57:41.008801Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:57:41.008805Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:41.008809Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:57:41.008812Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:41.008815Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:57:41.008822Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:41.008826Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:57:41.008830Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:57:41.008838Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:57:41.008843Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:57:41.008847Z node 7 
:FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:57:41.008929Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:57:41.008937Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:57:41.008940Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-07-08T11:57:41.008944Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-07-08T11:57:41.008963Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:57:41.008973Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-07-08T11:57:41.009364Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-07-08T11:57:41.009426Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-07-08T11:57:41.009534Z node 7 :TX_PROXY DEBUG: actor# [7:268:2259] Bootstrap 2025-07-08T11:57:41.010868Z node 7 :TX_PROXY DEBUG: actor# [7:268:2259] Become StateWork (SchemeCache [7:273:2264]) 2025-07-08T11:57:41.011453Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "USER_1" ExternalSchemeShard: true } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:57:41.011481Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 101:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "USER_1" ExternalSchemeShard: true } 2025-07-08T11:57:41.011486Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 101:0, path /MyRoot/USER_1 2025-07-08T11:57:41.011506Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 101:0, explain: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp:1125, at schemeshard: 72057594046678944 2025-07-08T11:57:41.011511Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusPathDoesNotExist, reason: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp:1125, at schemeshard: 72057594046678944 2025-07-08T11:57:41.011617Z node 7 :TX_PROXY 
DEBUG: actor# [7:268:2259] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-07-08T11:57:41.012132Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathDoesNotExist Reason: "Invalid AlterExtSubDomain request: Check failed: path: \'/MyRoot/USER_1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp:1125" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:57:41.012154Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp:1125, operation: ALTER DATABASE, path: /MyRoot/USER_1 2025-07-08T11:57:41.012220Z node 7 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-ExternalHive >> test.py::test[blocks-minmax_strings--ForceBlocks] [GOOD] >> test.py::test[blocks-minmax_strings--Results] >> BackupRestore::RestoreReplicationWithoutSecret [GOOD] >> BackupRestore::RestoreExternalDataSourceWithoutSecret |62.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst-ExternalHive >> LocalTableWriter::ApplyInCorrectOrder >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive |62.5%| [LD] {RESULT} 
$(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:57:37.609874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:57:37.609900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:57:37.609905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:57:37.609910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:57:37.609916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:57:37.609920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:57:37.609932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:57:37.609946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:57:37.610014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:57:37.623638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:57:37.623662Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:57:37.627631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:57:37.627674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:57:37.627713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:57:37.628907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:57:37.628968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:57:37.629069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:37.629445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:57:37.630115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:37.630151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:57:37.630312Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-07-08T11:57:37.630320Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:37.630333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:57:37.630339Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:57:37.630344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:57:37.630364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:57:37.631436Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:57:37.650499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:57:37.650561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:37.650615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:57:37.650650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:57:37.650660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:37.651194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:37.651211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:57:37.651238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:37.651245Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:57:37.651250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:57:37.651254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:57:37.651550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:37.651558Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:57:37.651562Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:57:37.651820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:37.651827Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:37.651831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:37.651837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:57:37.652413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:57:37.652707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:57:37.652734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:57:37.652875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:37.652894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:57:37.652902Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:37.652966Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:57:37.652973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:37.652993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:57:37.653003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:57:37.653334Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:57:37.653342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:57:37.653370Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:37.653375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:57:37.653384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:37.653389Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:57:37.653399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:57:37.653404Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:37.653408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:57:37.653412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:37.653416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:57:37.653420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:37.653425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:57:37.653429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:57:37.653438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:57:37.653443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:57:37.653448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:57:37.653833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:57:37.653845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... nerId: 72057594046678944, LocalPathId: 2], version: 3 2025-07-08T11:57:41.715851Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-07-08T11:57:41.715860Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-07-08T11:57:41.716289Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-07-08T11:57:41.716319Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-07-08T11:57:41.716452Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:41.716471Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 30064773230 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:57:41.716478Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TPropose, operationId: 102:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-07-08T11:57:41.716491Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeExtSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 102 parent: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-07-08T11:57:41.716495Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-07-08T11:57:41.716502Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 134 2025-07-08T11:57:41.716581Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-07-08T11:57:41.716843Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-07-08T11:57:41.717091Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-07-08T11:57:41.717102Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:57:41.717126Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 134 -> 135 2025-07-08T11:57:41.717147Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:57:41.717155Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 FAKE_COORDINATOR: Erasing txId 102 2025-07-08T11:57:41.717476Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:57:41.717485Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:57:41.717507Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-07-08T11:57:41.717528Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:41.717533Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:205:2207], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-07-08T11:57:41.717537Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:205:2207], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-07-08T11:57:41.717586Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-07-08T11:57:41.717592Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 102:0 ProgressState 2025-07-08T11:57:41.717597Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 135 -> 240 2025-07-08T11:57:41.717691Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T11:57:41.717700Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T11:57:41.717704Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-07-08T11:57:41.717708Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 
2025-07-08T11:57:41.717712Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:57:41.717806Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T11:57:41.717816Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T11:57:41.717820Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-07-08T11:57:41.717827Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-07-08T11:57:41.717831Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-07-08T11:57:41.717840Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-07-08T11:57:41.718321Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-07-08T11:57:41.718333Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-07-08T11:57:41.718345Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-07-08T11:57:41.718349Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-07-08T11:57:41.718354Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-07-08T11:57:41.718357Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-07-08T11:57:41.718361Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-07-08T11:57:41.718368Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-07-08T11:57:41.718373Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-07-08T11:57:41.718377Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-07-08T11:57:41.718388Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-07-08T11:57:41.718478Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T11:57:41.718484Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-07-08T11:57:41.718498Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-07-08T11:57:41.718564Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T11:57:41.718570Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 
72057594046678944 2025-07-08T11:57:41.718578Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:57:41.718732Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-07-08T11:57:41.718832Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-07-08T11:57:41.719212Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-07-08T11:57:41.719227Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-07-08T11:57:41.719274Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-07-08T11:57:41.719281Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-07-08T11:57:41.719350Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-07-08T11:57:41.719369Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-07-08T11:57:41.719374Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:340:2331] TestWaitNotification: OK eventTxId 102 2025-07-08T11:57:41.719441Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:57:41.719468Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 36us result status StatusPathDoesNotExist 2025-07-08T11:57:41.719500Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> test.py::test[weak_field-weak_field_rest--Results] [GOOD] >> test.py::test[weak_field-weak_field_to_yson--Results] |62.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true >> 
test.py::test[window-win_func_first_last_rev--ForceBlocks] [GOOD] >> test.py::test[window-win_func_first_last_rev--Results] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice >> test.py::test[key_filter-dict_contains-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-dict_contains-default.txt-Results] >> test.py::test[pg-tpcds-q77-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q98-default.txt-Results] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst-ExternalHive >> TGroupMapperTest::NonUniformCluster [GOOD] >> LocalTableWriter::WaitTxIds >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-ExternalHive >> LocalTableWriter::SupportedTypes >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] |62.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformCluster [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive >> LocalTableWriter::ApplyInCorrectOrder [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst >> LocalTableWriter::SupportedTypes [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:57:38.704275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:57:38.704302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:57:38.704307Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:57:38.704312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:57:38.704317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:57:38.704320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:57:38.704510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:57:38.704523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:57:38.712726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:57:38.735863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:57:38.735882Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:57:38.766761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:57:38.766849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:57:38.766896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:57:38.773147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:57:38.773201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:57:38.773509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:38.773889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:57:38.777085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:38.777139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:57:38.777346Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:57:38.777353Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:38.777368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:57:38.777374Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:57:38.777379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:57:38.777406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:57:38.779370Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:57:38.900925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: 
"pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:57:38.901012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:38.901080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:57:38.901122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:57:38.901134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:38.913362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:38.913406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:57:38.913462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:38.913474Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:57:38.913479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:57:38.913485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:57:38.917521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:38.917541Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:57:38.917548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:57:38.923318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:38.923339Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:38.923347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:38.923356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:57:38.924432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:57:38.942523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:57:38.942606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:57:38.942835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:38.942882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:57:38.942893Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:38.942988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:57:38.942998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:38.943040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:57:38.943056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:57:38.945236Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:57:38.945247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:57:38.945298Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:38.945303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:57:38.945314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:38.945321Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:57:38.945333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:57:38.945341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:38.945345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:57:38.945348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:38.945352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:57:38.945357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:38.945361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:57:38.945365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:57:38.945377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:57:38.945382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:57:38.945386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:57:38.945817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:57:38.945834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:57:42.667048Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_0', error: path is under operation (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExtSubDomain, state: EPathStateAlter), source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp:1125, operation: ALTER DATABASE, path: /MyRoot/USER_0 2025-07-08T11:57:42.667242Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-07-08T11:57:42.667251Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 103:0 ProgressState, operation type: TxAlterExtSubDomain, at tablet# 72057594046678944 2025-07-08T11:57:42.667257Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 103:0 ProgressState no shards to create, do next state 2025-07-08T11:57:42.667260Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 2 -> 3 2025-07-08T11:57:42.667548Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-07-08T11:57:42.667558Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 103:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:57:42.667562Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 3 -> 128 2025-07-08T11:57:42.667835Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-07-08T11:57:42.667843Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 103:0, at schemeshard: 72057594046678944 2025-07-08T11:57:42.667849Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 103:0, at tablet# 72057594046678944 2025-07-08T11:57:42.667855Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-07-08T11:57:42.667878Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:57:42.668125Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-07-08T11:57:42.668148Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 
72057594046678944 for txId: 103 at step: 5000004 2025-07-08T11:57:42.668201Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:42.668217Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 30064773230 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:57:42.668223Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 103:0, at tablet# 72057594046678944 2025-07-08T11:57:42.668269Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2025-07-08T11:57:42.668275Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 103:0, at tablet# 72057594046678944 2025-07-08T11:57:42.668288Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-07-08T11:57:42.668311Z node 7 :FLAT_TX_SCHEMESHARD INFO: Send TEvUpdateTenantSchemeShard, to actor: [7:391:2362], msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4, at schemeshard: 72057594046678944 2025-07-08T11:57:42.668664Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateTenantSchemeShard, at schemeshard: 72075186234409546, msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4 2025-07-08T11:57:42.668680Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4, at schemeshard: 72075186234409546 2025-07-08T11:57:42.668707Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Cannot publish paths for unknown operation id#0 FAKE_COORDINATOR: Erasing txId 103 2025-07-08T11:57:42.668756Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:57:42.668761Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-07-08T11:57:42.668783Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:42.668787Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:205:2207], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-07-08T11:57:42.668845Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-07-08T11:57:42.668851Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 103:0, ProgressState, NeedSyncHive: 0 2025-07-08T11:57:42.668855Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 240 -> 240 2025-07-08T11:57:42.669016Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-07-08T11:57:42.669030Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-07-08T11:57:42.669034Z node 
7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-07-08T11:57:42.669038Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2025-07-08T11:57:42.669042Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-07-08T11:57:42.669055Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-07-08T11:57:42.669551Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186234409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 4 UserAttributesVersion: 1 TenantHive: 72075186233409546 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-07-08T11:57:42.669564Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-07-08T11:57:42.669574Z node 7 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[7:391:2362], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-07-08T11:57:42.669593Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409546 2025-07-08T11:57:42.669596Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409546, txId: 0, path id: [OwnerId: 72075186234409546, LocalPathId: 1] 2025-07-08T11:57:42.669611Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409546 2025-07-08T11:57:42.669614Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:490:2432], at schemeshard: 72075186234409546, txId: 0, path id: 1 2025-07-08T11:57:42.669700Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-07-08T11:57:42.669705Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-07-08T11:57:42.669712Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-07-08T11:57:42.669715Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-07-08T11:57:42.669717Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-07-08T11:57:42.669719Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-07-08T11:57:42.669723Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-07-08T11:57:42.669728Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone 
TxId: 103 ready parts: 1/1 2025-07-08T11:57:42.669732Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-07-08T11:57:42.669736Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-07-08T11:57:42.669746Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-07-08T11:57:42.669789Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186234409546, msg: Owner: 72075186234409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186234409546, cookie: 0 2025-07-08T11:57:42.670068Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-07-08T11:57:42.670084Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 104 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 2025-07-08T11:57:42.670383Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-07-08T11:57:42.670392Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-07-08T11:57:42.670449Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-07-08T11:57:42.670465Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-07-08T11:57:42.670470Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:572:2512] TestWaitNotification: OK eventTxId 103 |62.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> test.py::test[view-init_view_after_eval-default.txt-Results] [GOOD] >> test.py::test[view-standalone_view_lambda--Results] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:57:38.954472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:57:38.954498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:57:38.954502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:57:38.954507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing 
config: using default configuration 2025-07-08T11:57:38.954512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:57:38.954515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:57:38.954527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:57:38.954540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:57:38.954617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:57:38.968079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:57:38.968095Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:57:38.980425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:57:38.980477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:57:38.980521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:57:38.983568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:57:38.983614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:57:38.983715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:38.983845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:57:38.991239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:38.991291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:57:38.991673Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:57:38.991681Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:38.991705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:57:38.991711Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:57:38.991717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:57:38.991742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:57:38.996787Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:57:39.074932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:57:39.074998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain 
Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:39.075056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:57:39.075109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:57:39.075123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:39.075810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:39.075838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:57:39.075878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:39.075887Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:57:39.075892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:57:39.075896Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:57:39.076297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:39.076309Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:57:39.076313Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:57:39.076637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:39.076649Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:39.076654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:39.076662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:57:39.077329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:57:39.078152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:57:39.078187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:57:39.078350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 
1, at schemeshard: 72057594046678944 2025-07-08T11:57:39.078374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:57:39.078381Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:39.078450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:57:39.078457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:39.078481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:57:39.078494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:57:39.079356Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:57:39.079365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:57:39.079402Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:39.079409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:57:39.079420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:39.079426Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:57:39.079436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:57:39.079440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:39.079444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:57:39.079448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:39.079452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:57:39.079457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:39.079461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:57:39.079469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:57:39.079478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:57:39.079483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:57:39.079487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:57:39.079865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:57:39.079885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... NFO: Part operation is done id#105:0 progress is 1/1 2025-07-08T11:57:43.342914Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-07-08T11:57:43.342918Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-07-08T11:57:43.342921Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-07-08T11:57:43.342925Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: true 2025-07-08T11:57:43.342930Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-07-08T11:57:43.342935Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-07-08T11:57:43.342938Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-07-08T11:57:43.342968Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-07-08T11:57:43.343119Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-07-08T11:57:43.343291Z node 6 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-07-08T11:57:43.347318Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:43.347382Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-07-08T11:57:43.347506Z node 6 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186234409547 2025-07-08T11:57:43.347548Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-07-08T11:57:43.347573Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-07-08T11:57:43.347634Z node 6 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186234409546 2025-07-08T11:57:43.347879Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-07-08T11:57:43.347898Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-07-08T11:57:43.348087Z node 6 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Forgetting tablet 72075186234409547 2025-07-08T11:57:43.349007Z node 6 :TX_DATASHARD ERROR: Datashard's schemeshard pipe destroyed while no messages to sent at 72075186234409550 
Forgetting tablet 72075186234409546 2025-07-08T11:57:43.349191Z node 6 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186234409548 2025-07-08T11:57:43.349240Z node 6 :TX_DATASHARD ERROR: Datashard's schemeshard pipe destroyed while no messages to sent at 72075186234409549 2025-07-08T11:57:43.349302Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 Forgetting tablet 72075186234409548 2025-07-08T11:57:43.353197Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-07-08T11:57:43.353254Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-07-08T11:57:43.353372Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T11:57:43.353378Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-07-08T11:57:43.353398Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-07-08T11:57:43.353508Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T11:57:43.353513Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-07-08T11:57:43.353524Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:57:43.354036Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-07-08T11:57:43.354047Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-07-08T11:57:43.354063Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-07-08T11:57:43.354067Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186234409547 2025-07-08T11:57:43.354075Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-07-08T11:57:43.354079Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186234409546 2025-07-08T11:57:43.354088Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-07-08T11:57:43.354092Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186234409548 2025-07-08T11:57:43.354402Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-07-08T11:57:43.354419Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-07-08T11:57:43.354487Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: 
send EvNotifyTxCompletion 2025-07-08T11:57:43.354493Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-07-08T11:57:43.354567Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-07-08T11:57:43.354585Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-07-08T11:57:43.354589Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [6:791:2700] TestWaitNotification: OK eventTxId 105 2025-07-08T11:57:43.354656Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir/table_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:57:43.354687Z node 6 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir/table_1" took 42us result status StatusPathDoesNotExist 2025-07-08T11:57:43.354719Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/dir/table_1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/USER_0/dir/table_1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-07-08T11:57:43.354765Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:57:43.354775Z node 6 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 11us result status StatusPathDoesNotExist 2025-07-08T11:57:43.354789Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-07-08T11:57:43.354830Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false 
BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:57:43.354848Z node 6 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 19us result status StatusSuccess 2025-07-08T11:57:43.354902Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::ApplyInCorrectOrder [GOOD] Test command err: 2025-07-08T11:57:43.101037Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679037248859346:2144];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:57:43.134238Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001bf5/r3tmp/tmpoIyp5I/pdisk_1.dat 2025-07-08T11:57:43.175417Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:8271 2025-07-08T11:57:43.242815Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:57:43.242854Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:57:43.243679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on 
GrpcPort 29921, node 1 2025-07-08T11:57:43.297213Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:57:43.297225Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:57:43.297227Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:57:43.297268Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8271 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:57:43.376387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T11:57:43.381386Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:57:43.382335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1751975863449 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) 2025-07-08T11:57:43.457974Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679037248859893:2346] Handshake: worker# [1:7524679037248859803:2286] 2025-07-08T11:57:43.458073Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679037248859893:2346] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-07-08T11:57:43.458115Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679037248859893:2346] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-07-08T11:57:43.458121Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679037248859893:2346] Send handshake: worker# [1:7524679037248859803:2286] 2025-07-08T11:57:43.458230Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679037248859893:2346] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-07-08T11:57:43.459156Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679037248859893:2346] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2025-07-08T11:57:43.459182Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679037248859893:2346] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2025-07-08T11:57:43.459234Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7524679037248859896:2346] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-07-08T11:57:43.459239Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679037248859893:2346] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-07-08T11:57:43.459251Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7524679037248859896:2346] Handle 
NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-07-08T11:57:43.461484Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7524679037248859896:2346] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-07-08T11:57:43.461514Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679037248859893:2346] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-07-08T11:57:43.461522Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679037248859893:2346] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2025-07-08T11:57:43.465083Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679037248859893:2346] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 49b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 19b Offset: 4 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-07-08T11:57:43.465255Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679037248859893:2346] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } 2025-07-08T11:57:43.465275Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679037248859893:2346] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 49 },{ Order: 3 BodySize: 48 }] } 2025-07-08T11:57:43.465302Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7524679037248859896:2346] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 3 Group: 0 Step: 2 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-07-08T11:57:43.473177Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7524679037248859896:2346] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-07-08T11:57:43.473200Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679037248859893:2346] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-07-08T11:57:43.473209Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679037248859893:2346] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2,3] } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: 
[1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:57:38.148752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:57:38.148778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:57:38.148783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:57:38.148789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:57:38.148795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:57:38.148799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:57:38.148933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:57:38.148999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:57:38.149881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:57:38.202604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:57:38.202623Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:57:38.228549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:57:38.228611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:57:38.228652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:57:38.231455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:57:38.231507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:57:38.231868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:38.232190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:57:38.235573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:38.235640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:57:38.235832Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:57:38.235841Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:38.235856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:57:38.235863Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:57:38.235869Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:57:38.235891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:57:38.237809Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:57:38.300570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:57:38.300638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:38.300694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:57:38.300737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:57:38.300751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:38.301443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:38.301473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:57:38.301508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:38.301516Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:57:38.301521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:57:38.301525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:57:38.301941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:38.301956Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:57:38.301961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:57:38.302440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:38.302452Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:38.302459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:38.302465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:57:38.303051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { 
TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:57:38.303404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:57:38.303441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:57:38.303616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:38.303638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:57:38.303644Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:38.303714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:57:38.303722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:38.303747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:57:38.303758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:57:38.304140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:57:38.304147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:57:38.304175Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:38.304180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:57:38.304190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:38.304195Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:57:38.304205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:57:38.304212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:38.304216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:57:38.304219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:38.304223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:57:38.304227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:38.304232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:57:38.304235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:57:38.304244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:57:38.304249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:57:38.304253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:57:38.304605Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:57:38.304621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... EvConfigureStatus operationId:102:0 at schemeshard:72057594046678944 2025-07-08T11:57:43.434968Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 102:0 Got OK TEvConfigureStatus from tablet# 72075186233409548 shardIdx# 72057594046678944:3 at schemeshard# 72057594046678944 2025-07-08T11:57:43.435379Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-07-08T11:57:43.435426Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-07-08T11:57:43.435431Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-07-08T11:57:43.435481Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 102, at schemeshard: 72057594046678944 2025-07-08T11:57:43.435486Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-07-08T11:57:43.435491Z node 8 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 102, at schemeshard: 72057594046678944 2025-07-08T11:57:43.473611Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409549, partId: 0 2025-07-08T11:57:43.473658Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409549 2025-07-08T11:57:43.473666Z node 8 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 102:0 HandleReply TEvConfigureStatus operationId:102:0 at schemeshard:72057594046678944 2025-07-08T11:57:43.473675Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 102:0 Got OK TEvConfigureStatus from tablet# 72075186233409549 shardIdx# 72057594046678944:4 at schemeshard# 72057594046678944 2025-07-08T11:57:43.473682Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 3 -> 128 2025-07-08T11:57:43.474065Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-07-08T11:57:43.474102Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 102:0, at schemeshard: 72057594046678944 2025-07-08T11:57:43.474107Z node 8 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 102:0, at schemeshard: 72057594046678944 2025-07-08T11:57:43.474112Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 102:0, at tablet# 72057594046678944 2025-07-08T11:57:43.474118Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-07-08T11:57:43.474146Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:57:43.474429Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-07-08T11:57:43.474457Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-07-08T11:57:43.474519Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:43.474546Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 130 RawX2: 34359740522 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:57:43.474551Z node 8 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-07-08T11:57:43.474608Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-07-08T11:57:43.474614Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-07-08T11:57:43.474633Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-07-08T11:57:43.474651Z node 8 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[8:362:2337], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 72075186233409549, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-07-08T11:57:43.474986Z node 8 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:57:43.474996Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 
2025-07-08T11:57:43.475030Z node 8 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:43.475034Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [8:208:2210], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: Erasing txId 102 2025-07-08T11:57:43.475112Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-07-08T11:57:43.475118Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 102:0, ProgressState, NeedSyncHive: 0 2025-07-08T11:57:43.475122Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 240 -> 240 2025-07-08T11:57:43.475182Z node 8 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T11:57:43.475191Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T11:57:43.475195Z node 8 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-07-08T11:57:43.475199Z node 8 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-07-08T11:57:43.475203Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-07-08T11:57:43.475216Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-07-08T11:57:43.476000Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-07-08T11:57:43.476013Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-07-08T11:57:43.476024Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-07-08T11:57:43.476028Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-07-08T11:57:43.476032Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-07-08T11:57:43.476035Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-07-08T11:57:43.476039Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-07-08T11:57:43.476050Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [8:304:2295] message: TxId: 102 2025-07-08T11:57:43.476055Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-07-08T11:57:43.476060Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-07-08T11:57:43.476066Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-07-08T11:57:43.476094Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-07-08T11:57:43.476196Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-07-08T11:57:43.476445Z node 8 
:FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-07-08T11:57:43.476453Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:512:2451] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2025-07-08T11:57:43.477117Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "USER_0" ExternalStatisticsAggregator: false } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:57:43.477145Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 103:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "USER_0" ExternalStatisticsAggregator: false } 2025-07-08T11:57:43.477150Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 103:0, path /MyRoot/USER_0 2025-07-08T11:57:43.477170Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 103:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, at schemeshard: 72057594046678944 2025-07-08T11:57:43.477176Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, at schemeshard: 72057594046678944 2025-07-08T11:57:43.477568Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:57:43.477595Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, operation: ALTER DATABASE, path: /MyRoot/USER_0 TestModificationResult got TxId: 103, wait until txId: 103 >> LocalTableWriter::DecimalKeys >> test.py::test[pg-tpcds-q87-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q05-default.txt-ForceBlocks] >> StreamCreator::Basic >> LocalTableWriter::WaitTxIds [GOOD] >> test.py::test[select-select_all_filtered-default.txt-Results] [GOOD] >> test.py::test[select-select_concrete_detailed_columns-default.txt-Results] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::SupportedTypes [GOOD] Test command err: 2025-07-08T11:57:43.593308Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679039210273487:2069];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:57:43.593324Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001be4/r3tmp/tmptTxEf1/pdisk_1.dat 2025-07-08T11:57:43.669455Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:18338 2025-07-08T11:57:43.739276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:57:43.739303Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:57:43.740367Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18265, node 1 2025-07-08T11:57:43.785146Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:57:43.785162Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:57:43.785163Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:57:43.785205Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18338 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:57:43.827621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:43.835146Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-07-08T11:57:43.840006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1751975863904 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "int32_value" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "ui... (TRUNCATED) 2025-07-08T11:57:43.865802Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679039210274115:2345] Handshake: worker# [1:7524679039210274026:2286] 2025-07-08T11:57:43.865910Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679039210274115:2345] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-07-08T11:57:43.865982Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679039210274115:2345] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-07-08T11:57:43.865989Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679039210274115:2345] Send handshake: worker# [1:7524679039210274026:2286] 2025-07-08T11:57:43.866246Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679039210274115:2345] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 45b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 4 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: 
ProducerId: },{ Codec: RAW Data: 41b Offset: 5 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 41b Offset: 6 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 7 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 44b Offset: 8 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 66b Offset: 9 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 71b Offset: 10 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 72b Offset: 11 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 12 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 13 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 51b Offset: 14 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 58b Offset: 15 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 51b Offset: 16 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 54b Offset: 17 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 18 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 76b Offset: 19 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 20 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 54b Offset: 21 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 61b Offset: 22 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 51b Offset: 23 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 24 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 46b Offset: 25 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 47b Offset: 26 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 50b Offset: 27 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 28 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 72b Offset: 29 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 30 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 64b Offset: 31 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-07-08T11:57:43.866388Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679039210274115:2345] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 45 },{ Order: 2 BodySize: 45 },{ Order: 3 BodySize: 45 },{ Order: 4 BodySize: 45 },{ Order: 5 BodySize: 41 },{ Order: 6 BodySize: 41 },{ Order: 7 BodySize: 45 },{ Order: 8 BodySize: 44 },{ Order: 9 BodySize: 66 },{ Order: 10 BodySize: 
71 },{ Order: 11 BodySize: 72 },{ Order: 12 BodySize: 49 },{ Order: 13 BodySize: 48 },{ Order: 14 BodySize: 51 },{ Order: 15 BodySize: 58 },{ Order: 16 BodySize: 51 },{ Order: 17 BodySize: 54 },{ Order: 18 BodySize: 57 },{ Order: 19 BodySize: 76 },{ Order: 20 BodySize: 45 },{ Order: 21 BodySize: 54 },{ Order: 22 BodySize: 61 },{ Order: 23 BodySize: 51 },{ Order: 24 BodySize: 45 },{ Order: 25 BodySize: 46 },{ Order: 26 BodySize: 47 },{ Order: 27 BodySize: 50 },{ Order: 28 BodySize: 49 },{ Order: 29 BodySize: 72 },{ Order: 30 BodySize: 57 },{ Order: 31 BodySize: 64 }] } 2025-07-08T11:57:43.866465Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7524679039210274119:2345] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-07-08T11:57:43.866471Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679039210274115:2345] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-07-08T11:57:43.866511Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7524679039210274119:2345] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 4 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 5 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 41b },{ Order: 6 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 41b },{ Order: 7 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 8 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 44b },{ Order: 9 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 66b },{ Order: 10 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 71b },{ Order: 11 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 72b },{ Order: 12 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 13 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 14 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 15 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 58b },{ Order: 16 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 17 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 54b },{ Order: 18 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 19 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 76b },{ Order: 20 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 21 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 54b },{ Order: 22 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 61b },{ Order: 23 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 24 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 25 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 46b },{ Order: 26 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange 
Source: Unspecified Body: 47b },{ Order: 27 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 50b },{ Order: 28 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 29 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 72b },{ Order: 30 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 31 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 64b }] } 2025-07-08T11:57:43.881350Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7524679039210274119:2345] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-07-08T11:57:43.881380Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679039210274115:2345] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-07-08T11:57:43.881392Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679039210274115:2345] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31] } >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst-ExternalHive ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:57:41.189562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:57:41.189599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:57:41.189606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:57:41.189613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:57:41.189620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:57:41.189624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:57:41.189634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:57:41.189654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:57:41.189750Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:57:41.233775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:57:41.233793Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:57:41.255707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:57:41.255781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:57:41.255828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:57:41.263792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:57:41.263845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:57:41.263946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:41.264078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:57:41.273618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:41.273688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:57:41.273919Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:57:41.273928Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:41.273944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:57:41.273951Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:57:41.273956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:57:41.273984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:57:41.275500Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:57:41.379624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:57:41.379694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:41.379754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:57:41.379798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:57:41.379809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:41.381607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 
72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:41.381636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:57:41.381678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:41.381690Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:57:41.381695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:57:41.381700Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:57:41.382240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:41.382252Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:57:41.382257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:57:41.388425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:41.388438Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:41.388444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:41.388449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:57:41.389256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:57:41.389689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:57:41.389725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:57:41.389887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:41.389911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:57:41.389921Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:41.389987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:57:41.389996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:41.390021Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:57:41.390034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:57:41.390727Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:57:41.390737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:57:41.390765Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:41.390771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:57:41.390781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:41.390787Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:57:41.390799Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:57:41.390803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:41.390808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:57:41.390811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:41.390815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:57:41.390821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:41.390825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:57:41.390828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:57:41.390839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:57:41.390845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:57:41.390849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:57:41.391304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:57:41.391320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
ode 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:205:2207], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-07-08T11:57:44.068070Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-07-08T11:57:44.068075Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 103:0 ProgressState 2025-07-08T11:57:44.068079Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 135 -> 240 2025-07-08T11:57:44.068240Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-07-08T11:57:44.068248Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-07-08T11:57:44.068252Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-07-08T11:57:44.068255Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-07-08T11:57:44.068259Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:57:44.068340Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-07-08T11:57:44.068347Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-07-08T11:57:44.068350Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-07-08T11:57:44.068355Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-07-08T11:57:44.068358Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-07-08T11:57:44.068364Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-07-08T11:57:44.068755Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72075186233409546 at ss 72057594046678944 2025-07-08T11:57:44.068763Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72075186233409546 at ss 72057594046678944 2025-07-08T11:57:44.068765Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72075186233409546 at ss 72057594046678944 2025-07-08T11:57:44.068768Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72075186233409546 at ss 72057594046678944 2025-07-08T11:57:44.068809Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-07-08T11:57:44.068815Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-07-08T11:57:44.068823Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is 
done id#103:0 progress is 1/1 2025-07-08T11:57:44.068826Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-07-08T11:57:44.068830Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-07-08T11:57:44.068831Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-07-08T11:57:44.068834Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-07-08T11:57:44.068839Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-07-08T11:57:44.068843Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-07-08T11:57:44.068846Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-07-08T11:57:44.068871Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-07-08T11:57:44.069013Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-07-08T11:57:44.069258Z node 7 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-07-08T11:57:44.069293Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-07-08T11:57:44.069311Z node 7 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186234409547 2025-07-08T11:57:44.069342Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:44.069393Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186234409547 2025-07-08T11:57:44.069671Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-07-08T11:57:44.069701Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-07-08T11:57:44.069761Z node 7 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186234409546 2025-07-08T11:57:44.069975Z node 7 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:57:44.070946Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-07-08T11:57:44.070998Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-07-08T11:57:44.071103Z node 7 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186234409548 Forgetting tablet 72075186234409546 Forgetting tablet 72075186234409548 2025-07-08T11:57:44.071827Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: 
Status: OK Origin: 72075186233409546 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-07-08T11:57:44.071874Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-07-08T11:57:44.072096Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T11:57:44.072104Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-07-08T11:57:44.072129Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-07-08T11:57:44.072251Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T11:57:44.072258Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-07-08T11:57:44.072270Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:57:44.072755Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-07-08T11:57:44.072769Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-07-08T11:57:44.072788Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-07-08T11:57:44.072792Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186234409547 2025-07-08T11:57:44.072803Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-07-08T11:57:44.072818Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186234409546 2025-07-08T11:57:44.073232Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-07-08T11:57:44.073245Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186234409548 2025-07-08T11:57:44.073261Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-07-08T11:57:44.073296Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-07-08T11:57:44.073357Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-07-08T11:57:44.073365Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-07-08T11:57:44.073435Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-07-08T11:57:44.073457Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-07-08T11:57:44.073463Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:585:2525] TestWaitNotification: OK eventTxId 103 
2025-07-08T11:57:44.073535Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:57:44.073574Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 49us result status StatusPathDoesNotExist 2025-07-08T11:57:44.073610Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> test.py::test[tpch-q13-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q13-default.txt-Results] |62.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-create-streaming] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent |62.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> test.py::test[key_filter-dict_contains-default.txt-Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::WaitTxIds [GOOD] Test command err: 2025-07-08T11:57:43.343943Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679037909193156:2245];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:57:43.345345Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001bf1/r3tmp/tmpZo2ui5/pdisk_1.dat 2025-07-08T11:57:43.405116Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:22606 TServer::EnableGrpc on GrpcPort 18361, node 1 2025-07-08T11:57:43.449149Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:57:43.449165Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:57:43.449166Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:57:43.449207Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22606 2025-07-08T11:57:43.473508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-07-08T11:57:43.473535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:57:43.474434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:57:43.490720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:43.493842Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:57:43.580436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1751975863694 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) 2025-07-08T11:57:43.658462Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679037909193581:2344] Handshake: worker# [1:7524679037909193582:2345] 2025-07-08T11:57:43.658528Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679037909193581:2344] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-07-08T11:57:43.658582Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679037909193581:2344] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-07-08T11:57:43.658588Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679037909193581:2344] Send handshake: worker# [1:7524679037909193582:2345] 2025-07-08T11:57:43.658772Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679037909193581:2344] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-07-08T11:57:43.659511Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679037909193581:2344] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2025-07-08T11:57:43.659535Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679037909193581:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2025-07-08T11:57:43.659572Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7524679037909193586:2344] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-07-08T11:57:43.659577Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679037909193581:2344] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-07-08T11:57:43.659587Z node 1 :REPLICATION_SERVICE DEBUG: 
[TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7524679037909193586:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-07-08T11:57:43.661009Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7524679037909193586:2344] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-07-08T11:57:43.661026Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679037909193581:2344] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-07-08T11:57:43.661032Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679037909193581:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2025-07-08T11:57:44.328062Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:57:44.663481Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679037909193581:2344] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } 2025-07-08T11:57:44.663537Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679037909193581:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 49 }] } 2025-07-08T11:57:44.663567Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7524679037909193586:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2025-07-08T11:57:44.667437Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7524679037909193586:2344] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-07-08T11:57:44.667461Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679037909193581:2344] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-07-08T11:57:44.667470Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679037909193581:2344] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2] } >> test.py::test[key_filter-string_with-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:57:41.639957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, 
MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:57:41.639985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:57:41.639991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:57:41.639995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:57:41.640001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:57:41.640005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:57:41.640013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:57:41.640027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:57:41.640104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:57:41.652763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:57:41.652783Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:57:41.656295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:57:41.656337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:57:41.656377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:57:41.657741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:57:41.657785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:57:41.657883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:41.658043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:57:41.658861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:41.658904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:57:41.659130Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:57:41.659140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:41.659158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:57:41.659166Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:57:41.659172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:57:41.659195Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:57:41.660569Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: 
[1:241:2058] recipient: [1:15:2062] 2025-07-08T11:57:41.678739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:57:41.678784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:41.678824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:57:41.678863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:57:41.678873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:41.679426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:41.679447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:57:41.679476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:41.679484Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:57:41.679490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:57:41.679495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:57:41.679804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:41.679812Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:57:41.679817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:57:41.680087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:41.680095Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:41.680101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:41.680107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:57:41.680651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:57:41.680922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
2025-07-08T11:57:41.680966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:57:41.681091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:41.681110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:57:41.681120Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:41.681181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:57:41.681187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:41.681211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:57:41.681223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:57:41.681562Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:57:41.681568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:57:41.681603Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:41.681608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:57:41.681618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:41.681623Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:57:41.681633Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:57:41.681637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:41.681641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:57:41.681644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:41.681648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:57:41.681654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:41.681658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:57:41.681662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:57:41.681671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate 
target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:57:41.681675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:57:41.681679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:57:41.682033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:57:41.682049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... UG: TTxPublishToSchemeBoard Send, to populator: [7:205:2207], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-07-08T11:57:44.554123Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-07-08T11:57:44.554129Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 103:0, ProgressState, NeedSyncHive: 0 2025-07-08T11:57:44.554133Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 240 -> 240 2025-07-08T11:57:44.554211Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-07-08T11:57:44.554224Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-07-08T11:57:44.554228Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-07-08T11:57:44.554233Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2025-07-08T11:57:44.554237Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-07-08T11:57:44.554248Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-07-08T11:57:44.555280Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186234409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 4 UserAttributesVersion: 1 TenantHive: 72075186233409546 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-07-08T11:57:44.555296Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-07-08T11:57:44.555311Z node 7 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[7:391:2362], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 
18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-07-08T11:57:44.555326Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409546 2025-07-08T11:57:44.555330Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409546, txId: 0, path id: [OwnerId: 72075186234409546, LocalPathId: 1] 2025-07-08T11:57:44.555351Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409546 2025-07-08T11:57:44.555355Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:490:2432], at schemeshard: 72075186234409546, txId: 0, path id: 1 2025-07-08T11:57:44.555506Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186234409546, msg: Owner: 72075186234409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186234409546, cookie: 0 2025-07-08T11:57:44.555554Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-07-08T11:57:44.555560Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-07-08T11:57:44.555572Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-07-08T11:57:44.555576Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-07-08T11:57:44.555580Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-07-08T11:57:44.555583Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-07-08T11:57:44.555587Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-07-08T11:57:44.555595Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-07-08T11:57:44.555600Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-07-08T11:57:44.555604Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-07-08T11:57:44.555614Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-07-08T11:57:44.555661Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-07-08T11:57:44.555668Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-07-08T11:57:44.556135Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-07-08T11:57:44.556143Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-07-08T11:57:44.556200Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-07-08T11:57:44.556214Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-07-08T11:57:44.556218Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 103: satisfy waiter [7:566:2506] TestWaitNotification: OK eventTxId 103 2025-07-08T11:57:44.556282Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:57:44.556305Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 29us result status StatusSuccess 2025-07-08T11:57:44.556376Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:57:44.556420Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:57:44.556430Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 12us result status StatusSuccess 2025-07-08T11:57:44.556467Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } 
DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:57:44.556511Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409546 2025-07-08T11:57:44.556526Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186234409546 describe path "/MyRoot/USER_0" took 16us result status StatusSuccess 2025-07-08T11:57:44.556563Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186234409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/USER_0" } } } PathId: 1 PathOwnerId: 72075186234409546, at schemeshard: 72075186234409546 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:57:39.872131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:57:39.872157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:57:39.872162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:57:39.872169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:57:39.872175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type 
TxMergeTablePartition, limit 10000 2025-07-08T11:57:39.872180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:57:39.872193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:57:39.872206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:57:39.872286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:57:39.908704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:57:39.908724Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:57:39.943403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:57:39.943465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:57:39.943507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:57:39.951152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:57:39.951203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:57:39.951401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:39.951671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:57:39.953908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:39.953952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:57:39.954147Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:57:39.954154Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:39.954168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:57:39.954174Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:57:39.954179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:57:39.954200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:57:39.956077Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:57:40.133857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:57:40.133933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:40.134001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:57:40.134050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:57:40.134064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:40.138130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:40.138167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:57:40.138216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:40.138229Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:57:40.138235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:57:40.138241Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:57:40.141263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:40.141285Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:57:40.141293Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:57:40.141888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:40.141903Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:40.141909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:40.141918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:57:40.142550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:57:40.143962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:57:40.143997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:57:40.144158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:40.144187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, 
message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:57:40.144199Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:40.144269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:57:40.144279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:40.144312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:57:40.144324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:57:40.145258Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:57:40.145271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:57:40.145336Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:40.145344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:57:40.145363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:40.145373Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:57:40.145390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:57:40.145397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:40.145404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:57:40.145410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:40.145417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:57:40.145427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:40.145434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:57:40.145440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:57:40.145456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:57:40.145465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:57:40.145471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:57:40.145903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:57:40.145925Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... ESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 30064773230 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:57:44.409771Z node 7 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 104:0, stepId:5000005, at schemeshard: 72057594046678944 2025-07-08T11:57:44.409806Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-07-08T11:57:44.409810Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-07-08T11:57:44.409815Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-07-08T11:57:44.409819Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-07-08T11:57:44.409827Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-07-08T11:57:44.409835Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-07-08T11:57:44.409855Z node 7 :FLAT_TX_SCHEMESHARD INFO: Send TEvUpdateTenantSchemeShard, to actor: [7:351:2330], msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: "user__attr_1" Value: "value" } UserAttributesVersion: 2, at schemeshard: 72057594046678944 2025-07-08T11:57:44.409859Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-07-08T11:57:44.409863Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-07-08T11:57:44.409866Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-07-08T11:57:44.409872Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-07-08T11:57:44.409877Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 1, subscribers: 0 2025-07-08T11:57:44.409880Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 6 2025-07-08T11:57:44.410201Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateTenantSchemeShard, at schemeshard: 72075186233409546, msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: "user__attr_1" Value: "value" } UserAttributesVersion: 2 2025-07-08T11:57:44.410215Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: "user__attr_1" Value: "value" } UserAttributesVersion: 2, at schemeshard: 72075186233409546 2025-07-08T11:57:44.410244Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Cannot publish paths for unknown operation id#0 2025-07-08T11:57:44.410270Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:57:44.410274Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-07-08T11:57:44.410296Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:44.410300Z node 7 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:205:2207], at schemeshard: 72057594046678944, txId: 104, path id: 2 FAKE_COORDINATOR: Erasing txId 104 2025-07-08T11:57:44.410401Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 104 2025-07-08T11:57:44.410409Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 104 2025-07-08T11:57:44.410413Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-07-08T11:57:44.410416Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2025-07-08T11:57:44.410420Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-07-08T11:57:44.410432Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-07-08T11:57:44.410863Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186233409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 2 TenantHive: 18446744073709551615 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-07-08T11:57:44.410875Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-07-08T11:57:44.410890Z node 7 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[7:351:2330], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 2, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 2, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-07-08T11:57:44.410905Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-07-08T11:57:44.410909Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-07-08T11:57:44.410924Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-07-08T11:57:44.410928Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:442:2394], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-07-08T11:57:44.411010Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-07-08T11:57:44.411017Z node 7 
:FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-07-08T11:57:44.411035Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186233409546, cookie: 0 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-07-08T11:57:44.411078Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-07-08T11:57:44.411086Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-07-08T11:57:44.411140Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-07-08T11:57:44.411150Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-07-08T11:57:44.411154Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [7:540:2490] TestWaitNotification: OK eventTxId 104 2025-07-08T11:57:44.411213Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:57:44.411233Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 26us result status StatusSuccess 2025-07-08T11:57:44.411306Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } StoragePools { Name: "pool-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } UserAttributes { Key: "user__attr_1" Value: "value" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:57:44.411362Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-07-08T11:57:44.411373Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path 
"/MyRoot/USER_0" took 12us result status StatusSuccess 2025-07-08T11:57:44.411406Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } StoragePools { Name: "pool-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/USER_0" } } UserAttributes { Key: "user__attr_1" Value: "value" } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent [GOOD] >> LocalTableWriter::DecimalKeys [GOOD] >> ConvertYdbPermissionNameToACLAttrs::TestEqualGranularAndDeprecatedAcl [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalEmpty [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty2 [GOOD] >> ConvertYdbValueToMiniKQLValueTest::List [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD] >> StreamCreator::Basic [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleBool [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalString [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalEmpty [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalOptionalEmpty [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalOptionalEmpty2 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::List [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Dict [GOOD] >> test.py::test[window-win_func_first_last_rev--Results] [GOOD] >> test.py::test[window-win_func_over_group_by--ForceBlocks] >> ConvertYdbValueToMiniKQLValueTest::SimpleInt32 [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzDate [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzDateTime [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzTimeStamp [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleInt32TypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD] >> test.py::test[window-win_func_lead_lag_opt--Results] [GOOD] |62.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader >> test.py::test[window-win_func_over_group_by_compl--ForceBlocks] >> BackupRestore::RestoreExternalDataSourceWithoutSecret [GOOD] >> BackupRestore::PrefixedVectorIndex >> test.py::test[blocks-add_int64--ForceBlocks] [GOOD] >> 
test.py::test[blocks-add_int64--Results] |62.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |62.6%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader >> ConvertYdbValueToMiniKQLValueTest::Void [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleUuidTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Struct >> ConvertYdbValueToMiniKQLValueTest::Struct [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Tuple [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Variant [GOOD] >> ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:57:42.994035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:57:42.994063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:57:42.994068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:57:42.994073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:57:42.994078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:57:42.994082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:57:42.994091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:57:42.994102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:57:42.994183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:57:43.003590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:57:43.003610Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:57:43.006725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:57:43.006773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:57:43.006802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:57:43.007930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:57:43.007965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:57:43.008037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 
72057594046678944 2025-07-08T11:57:43.008139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:57:43.008704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:43.008742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:57:43.008892Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:57:43.008897Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:43.008907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:57:43.008912Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:57:43.008916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:57:43.008932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:57:43.009911Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:57:43.023164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:57:43.023219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:43.023269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:57:43.023302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:57:43.023310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:43.026163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:43.026203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:57:43.026255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:43.026266Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:57:43.026271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:57:43.026276Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:57:43.029601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:43.029628Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:57:43.029636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:57:43.031406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:43.031426Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:43.031432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:43.031438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:57:43.031902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:57:43.032374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:57:43.032408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:57:43.032537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:43.032557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:57:43.032564Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:43.032616Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:57:43.032622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:43.032646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:57:43.032655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:57:43.033065Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:57:43.033073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:57:43.033114Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-07-08T11:57:43.033119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:57:43.033128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:43.033135Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:57:43.033146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:57:43.033150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:43.033155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:57:43.033158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:43.033162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:57:43.033167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:43.033172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:57:43.033175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:57:43.033186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:57:43.033192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:57:43.033196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:57:43.033599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:57:43.033614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
ve::TEvDeleteOwnerTabletsReply> complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-07-08T11:57:46.273715Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:57:46.273721Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:57:46.273758Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-07-08T11:57:46.273781Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:46.273785Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:205:2207], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-07-08T11:57:46.273790Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:205:2207], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-07-08T11:57:46.273800Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-07-08T11:57:46.273805Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 103:0 ProgressState 2025-07-08T11:57:46.273810Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 135 -> 240 2025-07-08T11:57:46.274000Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-07-08T11:57:46.274014Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-07-08T11:57:46.274018Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-07-08T11:57:46.274023Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-07-08T11:57:46.274028Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:57:46.279154Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-07-08T11:57:46.279184Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-07-08T11:57:46.279190Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-07-08T11:57:46.279196Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-07-08T11:57:46.279203Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-07-08T11:57:46.279225Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-07-08T11:57:46.289449Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-07-08T11:57:46.289474Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-07-08T11:57:46.289478Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-07-08T11:57:46.293048Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-07-08T11:57:46.293073Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-07-08T11:57:46.293100Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-07-08T11:57:46.293105Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-07-08T11:57:46.293111Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-07-08T11:57:46.293115Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-07-08T11:57:46.293120Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-07-08T11:57:46.293129Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-07-08T11:57:46.293135Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-07-08T11:57:46.293140Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-07-08T11:57:46.293192Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-07-08T11:57:46.293434Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-07-08T11:57:46.293577Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-07-08T11:57:46.293621Z node 7 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-07-08T11:57:46.293719Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:46.293778Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2025-07-08T11:57:46.294020Z node 7 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:57:46.294818Z node 7 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-07-08T11:57:46.294974Z node 7 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-07-08T11:57:46.295079Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-07-08T11:57:46.295126Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount 
reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409548 2025-07-08T11:57:46.295337Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-07-08T11:57:46.295364Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409547 2025-07-08T11:57:46.295667Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T11:57:46.295675Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-07-08T11:57:46.295699Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-07-08T11:57:46.295966Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T11:57:46.295978Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-07-08T11:57:46.295991Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:57:46.296674Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-07-08T11:57:46.296689Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-07-08T11:57:46.296704Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-07-08T11:57:46.296707Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-07-08T11:57:46.296820Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-07-08T11:57:46.296828Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-07-08T11:57:46.296886Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-07-08T11:57:46.296925Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-07-08T11:57:46.297007Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-07-08T11:57:46.297015Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-07-08T11:57:46.297089Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-07-08T11:57:46.297109Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-07-08T11:57:46.297115Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:534:2484] TestWaitNotification: OK eventTxId 103 
2025-07-08T11:57:46.297188Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:57:46.297227Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 53us result status StatusPathDoesNotExist 2025-07-08T11:57:46.297274Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> CellsFromTupleTest::CellsFromTupleSuccess [GOOD] >> CellsFromTupleTest::CellsFromTupleSuccessPg ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::Basic [GOOD] Test command err: 2025-07-08T11:57:45.054790Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679043938975522:2167];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:57:45.182025Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001bd7/r3tmp/tmpsctITg/pdisk_1.dat 2025-07-08T11:57:45.347881Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:57:45.347903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:57:45.349100Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:57:45.354607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19916 TServer::EnableGrpc on GrpcPort 24075, node 1 2025-07-08T11:57:45.856433Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:57:45.856444Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:57:45.856446Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:57:45.856486Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T11:57:46.005030Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:19916 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:57:46.114089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:46.121326Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-07-08T11:57:46.125809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1751975866228 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1751975866165 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1751975866228 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... 
(TRUNCATED) 2025-07-08T11:57:46.198915Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-07-08T11:57:46.198942Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-07-08T11:57:46.198945Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-07-08T11:57:46.199086Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-07-08T11:57:46.425844Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1751975866228, tx_id: 281474976710658 } } } 2025-07-08T11:57:46.425917Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-07-08T11:57:46.426297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-07-08T11:57:46.426601Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-07-08T11:57:46.426603Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-07-08T11:57:46.448171Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-07-08T11:57:46.448184Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] 2025-07-08T11:57:46.449139Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2025-07-08T11:57:46.478472Z node 1 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][1:7524679048233943551:2300] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:5:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-07-08T11:57:46.486902Z node 1 :REPLICATION_CONTROLLER 
TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2025-07-08T11:57:46.486913Z node 1 :REPLICATION_CONTROLLER INFO: [StreamCreator][rid 1][tid 1] Success: issues# 2025-07-08T11:57:46.491650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T11:57:46.500854Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2025-07-08T11:57:46.500871Z node 1 :REPLICATION_CONTROLLER INFO: [StreamCreator][rid 1][tid 1] Success: issues# TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1751975866228 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyC... (TRUNCATED) >> CellsFromTupleTest::CellsFromTupleSuccessPg [GOOD] >> CellsFromTupleTest::CellsFromTupleFails [GOOD] >> CellsFromTupleTest::CellsFromTupleFailsPg [GOOD] >> CompressionTests::Zstd [GOOD] >> CompressionTests::Unsupported [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:57:37.541076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:57:37.541103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:57:37.541108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:57:37.541114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:57:37.541120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:57:37.541124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:57:37.541137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:57:37.541150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:57:37.541238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:57:37.554193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:57:37.554216Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:57:37.558003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:57:37.558076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:57:37.558120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:57:37.559468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:57:37.559514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:57:37.559615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:37.559778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:57:37.560465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:37.560514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:57:37.560726Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:57:37.560735Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:37.560751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:57:37.560758Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:57:37.560763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:57:37.560787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:57:37.561981Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:57:37.581160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:57:37.581227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:37.581288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:57:37.581326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose 
status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:57:37.581336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:37.582038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:37.582060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:57:37.582101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:37.582110Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:57:37.582115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:57:37.582120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:57:37.582438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:37.582447Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:57:37.582451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:57:37.582730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:37.582738Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:37.582743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:37.582749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:57:37.583303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:57:37.583614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:57:37.583651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:57:37.583815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:37.583836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 
2025-07-08T11:57:37.583846Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:37.583904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:57:37.583910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:37.583938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:57:37.583950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:57:37.584277Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:57:37.584284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:57:37.584323Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:37.584327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:57:37.584337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:37.584343Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:57:37.584353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:57:37.584357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:37.584362Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:57:37.584365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:37.584370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:57:37.584375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:37.584380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:57:37.584385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:57:37.584393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:57:37.584398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:57:37.584402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:57:37.584759Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:57:37.584771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72075186233409546, LocalPathId: 9] was 2 2025-07-08T11:57:46.210279Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 116:0 1 -> 2 2025-07-08T11:57:46.210468Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 116:1, propose status:StatusAccepted, reason: , at schemeshard: 72075186233409546 2025-07-08T11:57:46.210478Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 116:0, at schemeshard: 72075186233409546 2025-07-08T11:57:46.210493Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 12 2025-07-08T11:57:46.210502Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 9] was 3 2025-07-08T11:57:46.210923Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 116, response: Status: StatusAccepted TxId: 116 SchemeshardId: 72075186233409546 PathId: 9, at schemeshard: 72075186233409546 2025-07-08T11:57:46.210947Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 116, database: /MyRoot/USER_0, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/USER_0/Table11 2025-07-08T11:57:46.210983Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-07-08T11:57:46.210988Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 116, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-07-08T11:57:46.211033Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 116, path id: [OwnerId: 72075186233409546, LocalPathId: 9] 2025-07-08T11:57:46.211047Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-07-08T11:57:46.211051Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:726:2628], at schemeshard: 72075186233409546, txId: 116, path id: 1 2025-07-08T11:57:46.211055Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:726:2628], at schemeshard: 72075186233409546, txId: 116, path id: 9 2025-07-08T11:57:46.211166Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 116:0, at schemeshard: 72075186233409546 2025-07-08T11:57:46.211174Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 116:0 ProgressState, operation type: TxCreateTable, at tablet# 72075186233409546 2025-07-08T11:57:46.211213Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 116:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 9 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2025-07-08T11:57:46.211336Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 16 PathOwnerId: 72075186233409546, cookie: 116 2025-07-08T11:57:46.211348Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 16 PathOwnerId: 72075186233409546, cookie: 116 2025-07-08T11:57:46.211351Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 116 2025-07-08T11:57:46.211356Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 116, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 16 2025-07-08T11:57:46.211360Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 13 2025-07-08T11:57:46.211557Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 9 Version: 1 PathOwnerId: 72075186233409546, cookie: 116 2025-07-08T11:57:46.211568Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 9 Version: 1 PathOwnerId: 72075186233409546, cookie: 116 2025-07-08T11:57:46.211571Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 116 2025-07-08T11:57:46.211574Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 116, pathId: [OwnerId: 72075186233409546, LocalPathId: 9], version: 1 2025-07-08T11:57:46.211577Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 9] was 4 2025-07-08T11:57:46.211587Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 116, ready parts: 0/1, is published: true 2025-07-08T11:57:46.211931Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 116:0 from tablet: 72075186233409546 to tablet: 72057594037968897 cookie: 72075186233409546:11 msg type: 268697601 2025-07-08T11:57:46.211957Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 116, partId: 0, tablet: 72057594037968897 2025-07-08T11:57:46.211961Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 116, shardIdx: 72075186233409546:11, partId: 0 2025-07-08T11:57:46.212035Z node 7 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 9 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2025-07-08T11:57:46.217149Z node 7 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72075186233409546, OwnerIdx 11, type DataShard, boot OK, tablet id 72075186233409556 2025-07-08T11:57:46.217556Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72075186233409546 message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2025-07-08T11:57:46.217570Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 116, shardIdx: 72075186233409546:11, partId: 0 2025-07-08T11:57:46.217592Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 116:0, at schemeshard: 72075186233409546, message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 
72075186233409556 Origin: 72057594037968897 2025-07-08T11:57:46.217600Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 116:0 HandleReply TEvCreateTabletReply, at tabletId: 72075186233409546 2025-07-08T11:57:46.217605Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 116:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2025-07-08T11:57:46.217631Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 116:0 2 -> 3 2025-07-08T11:57:46.218079Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 116 2025-07-08T11:57:46.218415Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 116 2025-07-08T11:57:46.218933Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 116:0, at schemeshard: 72075186233409546 2025-07-08T11:57:46.219014Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 116:0, at schemeshard: 72075186233409546 2025-07-08T11:57:46.219024Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId# 116:0 ProgressState at tabletId# 72075186233409546 2025-07-08T11:57:46.219037Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId# 116:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409556 seqNo: 3:8 2025-07-08T11:57:46.219127Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId# 116:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409556 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 673 RawX2: 30064773658 } TxBody: "\n\236\004\n\007Table11\020\t\032\r\n\003key\030\002 \001(\000@\000\032\020\n\005Value\030\200$ \002(\000@\000(\001:\262\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 \010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 \020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 \020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001J\026/MyRoot/USER_0/Table11\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\n\000\220\000\000\020\000\001\020\t:\004\010\003\020\010" TxId: 116 ExecLevel: 0 Flags: 0 SchemeShardId: 72075186233409546 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } SubDomainPathId: 1 2025-07-08T11:57:46.220093Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 116:0 from tablet: 72075186233409546 to tablet: 72075186233409556 cookie: 72075186233409546:11 msg type: 269549568 
2025-07-08T11:57:46.220160Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 116, partId: 0, tablet: 72075186233409556 TestModificationResult got TxId: 116, wait until txId: 116 TestModificationResults wait txId: 117 2025-07-08T11:57:46.225420Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table12" Columns { Name: "key" Type: "Uint32" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "key" } } TxId: 117 TabletId: 72075186233409546 , at schemeshard: 72075186233409546 2025-07-08T11:57:46.225885Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 117, response: Status: StatusQuotaExceeded Reason: "Request exceeded a limit on the number of schema operations, try again later." TxId: 117 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2025-07-08T11:57:46.225919Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 117, database: /MyRoot/USER_0, subject: , status: StatusQuotaExceeded, reason: Request exceeded a limit on the number of schema operations, try again later., operation: CREATE TABLE, path: /MyRoot/USER_0/Table12 TestModificationResult got TxId: 117, wait until txId: 117 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::DecimalKeys [GOOD] Test command err: 2025-07-08T11:57:44.870178Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679040204359626:2236];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:57:44.877115Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001bcd/r3tmp/tmp2A1OJb/pdisk_1.dat 2025-07-08T11:57:45.198913Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:57:45.200546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:57:45.200562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:57:45.217702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30049 TServer::EnableGrpc on GrpcPort 23189, node 1 2025-07-08T11:57:45.653286Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:57:45.653300Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:57:45.653302Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:57:45.653348Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T11:57:45.857241Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; TClient is connected to server localhost:30049 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:57:46.113896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:46.129134Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-07-08T11:57:46.222410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1751975866326 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Decimal(1,0)" TypeId: 4865 Id: 1 NotNull: false TypeInfo { DecimalPrecision: 1 DecimalScale: 0 } IsBuildInProgress: false } Columns { Name: "value" Type: "Decimal(35,10)" TypeId: 4865 I... 
(TRUNCATED) 2025-07-08T11:57:46.290424Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679048794294702:2354] Handshake: worker# [1:7524679048794294601:2295] 2025-07-08T11:57:46.290488Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679048794294702:2354] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-07-08T11:57:46.290527Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679048794294702:2354] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Decimal(1,0) : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-07-08T11:57:46.290549Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679048794294702:2354] Send handshake: worker# [1:7524679048794294601:2295] 2025-07-08T11:57:46.290704Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679048794294702:2354] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 57b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-07-08T11:57:46.290739Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679048794294702:2354] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 57 },{ Order: 2 BodySize: 57 },{ Order: 3 BodySize: 57 }] } 2025-07-08T11:57:46.290777Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7524679048794294705:2354] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-07-08T11:57:46.290782Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679048794294702:2354] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-07-08T11:57:46.290795Z node 1 :REPLICATION_SERVICE DEBUG: 
[TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7524679048794294705:2354] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b }] } 2025-07-08T11:57:46.300037Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7524679048794294705:2354] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-07-08T11:57:46.300066Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679048794294702:2354] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-07-08T11:57:46.300077Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7524679048794294702:2354] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } |62.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD] |62.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::Dict [GOOD] >> test.py::test[pg-tpcds-q72-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q72-default.txt-Results] |62.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Void [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Struct [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Tuple [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Variant [GOOD] >> ConvertTableDescription::StorageSettings >> ConvertYdbValueToMiniKQLValueTest::SimpleBool [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleBoolTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleDecimal [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleDecimalTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalString [GOOD] >> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD] >> ConvertTableDescription::StorageSettings [GOOD] >> ConvertTableDescription::ColumnFamilies [GOOD] >> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD] >> test.py::test[limit-empty_sort_after_limit-default.txt-Results] [GOOD] >> test.py::test[limit-insert_with_limit--Results] >> ConvertMiniKQLTypeToYdbTypeTest::SimpleType [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzDate [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Optional [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::List [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Struct [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Dict [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD] |62.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzDateTime [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzTimeStamp [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::UuidType [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::VariantTuple [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::VariantStruct [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Void [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD] |62.6%| [TS] {default-linux-x86_64, relwithdebinfo} 
ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD] |62.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD] >> JsonChangeRecord::DataChange [GOOD] |62.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD] |62.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD] |62.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_json_change_record/unittest >> BackupRestore::PrefixedVectorIndex [GOOD] >> test.py::test[hor_join-out_hor_join-default.txt-Results] [GOOD] |62.7%| [TA] $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[dq-precompute_result-default.txt-ForceBlocks] [GOOD] >> test.py::test[dq-precompute_result-default.txt-Results] [SKIPPED] >> test.py::test[expr-non_persistable_group_by_having_some_fail--ForceBlocks] |62.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD] |62.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_json_change_record/unittest >> test.py::test[blocks-minmax_strings--Results] [GOOD] >> test.py::test[blocks-nested_optionals--ForceBlocks] |62.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::DataChange [GOOD] >> test.py::test[pg-tpcds-q98-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q04-default.txt-Results] >> ConvertMiniKQLValueToYdbValueTest::SimpleInt32 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleInt64 >> test.py::test[insert-unique_distinct_hints--ForceBlocks] [GOOD] >> test.py::test[insert-unique_distinct_hints--Results] >> ConvertMiniKQLValueToYdbValueTest::SimpleInt64 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzDate [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzDateTime [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzTimeStamp [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleDecimal [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD] >> TPDiskRaces::OwnerKilledWhileReadingLogAndThenKillLastOwner [GOOD] >> TPDiskTest::PDiskRestart ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/backup_ut/unittest >> BackupRestore::PrefixedVectorIndex [GOOD] Test command err: 2025-07-08T11:57:24.372230Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524678955380718893:2232];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:57:24.372281Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpFtvKwg/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10277, node 1 2025-07-08T11:57:24.470758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:57:24.470794Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:57:24.472703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2025-07-08T11:57:24.475034Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:57:24.475044Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:57:24.475052Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:57:24.475098Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T11:57:24.475247Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:13158 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:57:24.531418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:24.814282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpRaODeu/"Create temporary directory "/Root/~backup_20250708T115724" in databaseProcess "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpRaODeu/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250708T115724/table" }Backup table "/Root/~backup_20250708T115724/table" to "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpRaODeu/table"Describe table "/Root/~backup_20250708T115724/table"Write scheme into "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpRaODeu/table/scheme.pb"Describe table "/Root/table"Write ACL into "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpRaODeu/table/permissions.pb"Read table "/Root/~backup_20250708T115724/table"Write data into "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpRaODeu/table/data_00.csv"Drop table "/Root/~backup_20250708T115724/table"2025-07-08T11:57:25.176435Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found Remove temporary directory "/Root/~backup_20250708T115724" in database2025-07-08T11:57:25.180525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715664:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpRaODeu/" to "/Root"2025-07-08T11:57:25.199340Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 
72075186224037888 not found Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpRaODeu/" to "/Root"Process "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpRaODeu/table"Read scheme from "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpRaODeu/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpRaODeu/table" to "/Root/table"2025-07-08T11:57:25.203672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpRaODeu/table/data_00.csv"Restore ACL "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpRaODeu/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpRaODeu/table/permissions.pb"2025-07-08T11:57:25.232443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715667:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-07-08T11:57:25.925254Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7524678960674871382:2232];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:57:25.925286Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpiaYob7/pdisk_1.dat 2025-07-08T11:57:25.978536Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19984, node 4 2025-07-08T11:57:25.992407Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:57:25.992419Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:57:25.992420Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:57:25.992459Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11177 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-07-08T11:57:26.023260Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:57:26.023290Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:57:26.025114Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:57:26.054731Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:57:26.060725Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:57:26.420752Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T11:57:26.521527Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpIa1qk8/"Create temporary directory "/Root/~backup_20250708T115726" in databaseProcess "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpIa1qk8/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250708T115726/table" }Backup table "/Root/~backup_20250708T115726/table" to "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpIa1qk8/table"Describe table "/Root/~backup_20250708T115726/table"Write scheme into "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpIa1qk8/table/scheme.pb"Describe table "/Root/table"Write ACL into "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpIa1qk8/table/permissions.pb"Read table "/Root/~backup_20250708T115726/table"Write data into "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpIa1qk8/table/data_00.csv"Drop table "/Root/~backup_20250708T115726/table"2025-07-08T11:57:26.763712Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037890 not found 2025-07-08T11:57:26.763733Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037891 not found Remove temporary directory "/Root/~backup_20250708T115726" in database2025-07-08T11:57:26.780650Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715665:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpIa1qk8/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpIa1qk8/" to "/Root"2025-07-08T11:57:26.814613Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037888 not found 2025-07-08T11:57:26.814736Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037889 not found Process "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpIa1qk8/table"Read scheme from "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpIa1qk8/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpIa1qk8/table" to 
"/Root/table"2025-07-08T11:57:26.846239Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/bui ... runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpvzNv9Q/"Create temporary directory "/Root/~backup_20250708T115745" in databaseProcess "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpvzNv9Q/externalDataSource"Backup external data source "/Root/externalDataSource" to "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpvzNv9Q/externalDataSource"Write external data source into "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpvzNv9Q/externalDataSource/create_external_data_source.sql"Write ACL into "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpvzNv9Q/externalDataSource/permissions.pb"Remove temporary directory "/Root/~backup_20250708T115745" in database2025-07-08T11:57:45.587542Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715695:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpvzNv9Q/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpvzNv9Q/" to "/Root"Process "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpvzNv9Q/externalDataSource"Restore external data source "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpvzNv9Q/externalDataSource" to "/Root/externalDataSource"Read external data source from "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpvzNv9Q/externalDataSource/create_external_data_source.sql"Check existence of the secret "secret"2025-07-08T11:57:45.875069Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715704:0, at schemeshard: 72057594046644480 2025-07-08T11:57:46.187291Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715710:0, at schemeshard: 72057594046644480 2025-07-08T11:57:46.342813Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715714:0, at schemeshard: 72057594046644480 2025-07-08T11:57:46.431401Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715717:0, at schemeshard: 72057594046644480 2025-07-08T11:57:46.617765Z node 22 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=22&id=YjIwYTM2YmUtNjhmN2U3NzYtNjg4NmM5MTEtZDZlMjU3NGE=, ActorId: [22:7524679045525541858:2654], ActorState: ExecuteState, TraceId: 01jzmycbgf89axv3byz6ftvehg, Create QueryResponse for error on request, msg: 2025-07-08T11:57:46.618401Z node 22 :KQP_EXECUTER ERROR: TxId: 281474976715724. Ctx: { TraceId: 01jzmycbgf89axv3byz6ftvehg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=22&id=YjIwYTM2YmUtNjhmN2U3NzYtNjg4NmM5MTEtZDZlMjU3NGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root Restore failed: [ {
: Info: path: /home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpvzNv9Q/externalDataSource } {
: Error: Secret "secret" does not exist or you do not have access permissions } ]Cleanup test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpRCIiIL/pdisk_1.dat 2025-07-08T11:57:48.021406Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T11:57:48.117057Z node 25 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:57:48.133930Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:57:48.133959Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:57:48.138574Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3413, node 25 2025-07-08T11:57:48.158404Z node 25 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:57:48.158416Z node 25 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:57:48.158418Z node 25 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:57:48.158470Z node 25 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6223 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:57:48.221750Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:57:48.233528Z node 25 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:57:48.516816Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpTsIBUg/"Create temporary directory "/Root/~backup_20250708T115748" in databaseProcess "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpTsIBUg/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250708T115748/table" }Backup table "/Root/~backup_20250708T115748/table" to "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpTsIBUg/table"Describe table "/Root/~backup_20250708T115748/table"Write scheme into "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpTsIBUg/table/scheme.pb"Describe table "/Root/table"Write ACL into "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpTsIBUg/table/permissions.pb"2025-07-08T11:57:48.993010Z node 25 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Read table "/Root/~backup_20250708T115748/table"Write data into "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpTsIBUg/table/data_00.csv"Drop table "/Root/~backup_20250708T115748/table"Remove temporary directory "/Root/~backup_20250708T115748" in database2025-07-08T11:57:49.294772Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715664:0, at schemeshard: 72057594046644480 Backup completed successfully2025-07-08T11:57:49.324716Z node 25 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 25, TabletId: 72075186224037892 not found 2025-07-08T11:57:49.324869Z node 25 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 25, TabletId: 72075186224037893 not found 2025-07-08T11:57:49.324871Z node 25 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 25, TabletId: 72075186224037894 not found 2025-07-08T11:57:49.324874Z node 25 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 25, TabletId: 72075186224037895 not found Restore "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpTsIBUg/" to "/Root"Resolved db base path: "/Root"2025-07-08T11:57:49.402446Z node 25 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 25, TabletId: 72075186224037888 not found 2025-07-08T11:57:49.402459Z node 25 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 25, TabletId: 72075186224037891 not found 2025-07-08T11:57:49.402461Z node 25 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 25, TabletId: 72075186224037890 not found 2025-07-08T11:57:49.402463Z node 25 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 25, TabletId: 72075186224037889 not found Restore folder "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpTsIBUg/" to "/Root"Process "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpTsIBUg/table"Read scheme from "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpTsIBUg/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpTsIBUg/table" to "/Root/table"2025-07-08T11:57:49.414681Z node 25 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpTsIBUg/table/data_00.csv"Restore index "byValue" on "/Root/table"2025-07-08T11:57:49.561163Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-07-08T11:57:49.641274Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-07-08T11:57:49.702694Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710760:0, at schemeshard: 72057594046644480 2025-07-08T11:57:49.822439Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710763:0, at schemeshard: 72057594046644480 2025-07-08T11:57:49.832659Z node 25 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 25, TabletId: 72075186224037900 not found 2025-07-08T11:57:49.850492Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710765:0, at schemeshard: 72057594046644480 2025-07-08T11:57:49.974352Z node 25 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 25, TabletId: 72075186224037902 not found 2025-07-08T11:57:49.974364Z node 25 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 25, TabletId: 72075186224037901 not found Restore ACL "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpTsIBUg/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/43nv/000a6f/r3tmp/tmpTsIBUg/table/permissions.pb"2025-07-08T11:57:50.348284Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715669:0, at schemeshard: 72057594046644480 Restore completed successfully >> TPDiskTest::PDiskRestart [GOOD] >> TPDiskTest::PDiskRestartManyLogWrites |62.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD] |62.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat >> test.py::test[weak_field-weak_field_to_yson--Results] [GOOD] >> test.py::test[window-full/session--Results] |62.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat >> TPDiskTest::PDiskRestartManyLogWrites [GOOD] >> TPDiskTest::CommitDeleteChunks >> JsonChangeRecord::DataChangeVersion [GOOD] |62.7%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... 
results_accumulator.log} |62.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat >> JsonChangeRecord::Heartbeat [GOOD] >> TPDiskTest::CommitDeleteChunks [GOOD] >> TPDiskTest::DeviceHaltTooLong |62.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_json_change_record/unittest |62.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part10/pytest >> test.py::test[hor_join-out_hor_join-default.txt-Results] [GOOD] |62.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_json_change_record/unittest |62.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_json_change_record/unittest >> test.py::test[blocks-add_int64--Results] [GOOD] >> test.py::test[blocks-sort_two_desc--ForceBlocks] |62.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::DataChangeVersion [GOOD] |62.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::Heartbeat [GOOD] |62.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_json_change_record/unittest |62.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_json_change_record/unittest |62.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> SharedThreads::RegistrationAndPassingAwayActorsLazyStrictPool [GOOD] >> test.py::test[pg-tpcds-q72-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q94-default.txt-ForceBlocks] |62.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |62.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/actors/core/ut/unittest >> SharedThreads::RegistrationAndPassingAwayActorsLazyStrictPool [GOOD] Test command err: Completed 7128776.197 Elapsed 28523619.71us Completed 6482099.28 Elapsed 25934587.94us Completed 9323858.428 Elapsed 37309456.48us Completed 8535548.868 Elapsed 34138919.27us Completed 9325704.095 Elapsed 37336183.87us Completed 9405926.198 Elapsed 37619795.38us >> test.py::test[select-select_concrete_detailed_columns-default.txt-Results] [GOOD] >> test.py::test[select-shift_columns-default.txt-Results] >> test.py::test[tpch-q13-default.txt-Results] [GOOD] >> test.py::test[type_v3-append_diff_layout2--ForceBlocks] [SKIPPED] |62.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Booting >> LocalPartitionReader::Simple [GOOD] >> test.py::test[expr-non_persistable_group_by_having_some_fail--ForceBlocks] [GOOD] >> LocalPartitionReader::Booting [GOOD] >> test.py::test[expr-non_persistable_group_by_having_some_fail--Results] [GOOD] >> test.py::test[file-file_list_simple--ForceBlocks] |62.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest |62.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |62.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest |62.8%| [TA] $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... 
results_accumulator.log} |62.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Booting [GOOD] >> LocalPartitionReader::FeedSlowly |62.8%| [TA] $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[blocks-interval_mul--ForceBlocks] [GOOD] >> test.py::test[blocks-interval_mul--Results] >> LocalPartitionReader::FeedSlowly [GOOD] |62.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Simple [GOOD] >> test.py::test[aggregate-group_by_cube_expr_trio--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_columns_reuse-default.txt-Results] |62.8%| [TA] $(B)/ydb/services/ydb/backup_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TFileStoreWithReboots::CheckFileStoreHDDLimits |62.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest |62.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... results_accumulator.log} |62.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part0/pytest >> test.py::test[type_v3-append_diff_layout2--ForceBlocks] [SKIPPED] |62.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> test.py::test[key_filter-string_with-default.txt-ForceBlocks] [GOOD] |62.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::FeedSlowly [GOOD] |62.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> test.py::test[key_filter-string_with-default.txt-Results] >> test.py::test[view-standalone_view_lambda--Results] [GOOD] >> test.py::test[weak_field-weak_field_data--Results] |62.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest |62.9%| [TA] $(B)/ydb/library/actors/core/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |62.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |62.9%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... results_accumulator.log} |62.9%| [TA] {RESULT} $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |62.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TBlobStorageProxyTest::TestDoubleEmptyGet |62.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/backup/impl/ut_local_partition_reader/unittest |62.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest |62.9%| [TA] {RESULT} $(B)/ydb/services/ydb/backup_ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.9%| [TA] {RESULT} $(B)/ydb/library/actors/core/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |62.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql >> TBlobStorageProxyTest::TestCollectGarbagePersistence >> TBlobStorageProxyTest::TestInFlightPuts >> TFileStoreWithReboots::CheckFileStoreHDDLimits [GOOD] >> TBlobStorageProxyTest::TestQuadrupleGroups >> TBlobStorageProxyTest::TestPartialGetBlock >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Block |62.9%| [TA] $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[insert-unique_distinct_hints--Results] [GOOD] >> test.py::test[join-anyjoin_common_nodata_keys--ForceBlocks] >> TBlobStorageProxyTest::TestProxyRestoreOnGetStripe >> test.py::test[limit-insert_with_limit--Results] [GOOD] >> test.py::test[limit-limit--Results] >> TBlobStorageProxyTest::TestProxyLongTailDiscover >> test.py::test[window-win_func_over_group_by--ForceBlocks] [GOOD] >> TBlobStorageProxyTest::TestVPutVGetPersistence >> TBlobStorageProxyTest::TestProxyPutInvalidSize >> TBlobStorageProxyTest::TestProxySimpleDiscoverNone >> test.py::test[window-win_func_over_group_by--Results] >> TBlobStorageProxyTest::TestSingleFailureMirror ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> TFileStoreWithReboots::CheckFileStoreHDDLimits [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:57:57.364843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:57:57.364872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:57:57.364878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:57:57.364883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:57:57.364889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:57:57.364893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:57:57.364907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:57:57.364921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:57:57.365010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:57:57.393987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:57:57.394013Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:57:57.438270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:57:57.438340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Execute 2025-07-08T11:57:57.438580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:57:57.451215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:57:57.451279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:57:57.451571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:57.452192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:57:57.455590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:57.455645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:57:57.456050Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:57:57.456058Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:57.456073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:57:57.456080Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:57:57.456085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:57:57.456118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:57:57.458122Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:57:57.550030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:57:57.550119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:57.550189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:57:57.550230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:57:57.550242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:57.554650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:57.554683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:57:57.554734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-07-08T11:57:57.554743Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:57:57.554748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:57:57.554761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:57:57.555355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:57.555364Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:57:57.555369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:57:57.555992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:57.555999Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:57.556004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:57.556012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:57:57.556735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:57:57.559205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:57:57.559250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:57:57.559430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:57.559451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:57:57.559462Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:57.560631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:57:57.560639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:57:57.560667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:57:57.560679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 
2025-07-08T11:57:57.563024Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:57:57.563032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:57:57.563077Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:57.563083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:57:57.563092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:57:57.563098Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:57:57.563109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:57:57.563113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:57.563118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:57:57.563120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:57.563124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:57:57.563129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:57:57.563133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:57:57.563136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:57:57.563146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:57:57.563151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:57:57.563155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:57:57.563508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:57:57.563520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
lKind: "pool-kind-1" IOPS: 0 Throughput: 0 Size: 0 } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-07-08T11:57:59.317098Z node 2 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 4, type FileStore, boot OK, tablet id 72075186233409549 2025-07-08T11:57:59.317195Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 4 TabletID: 72075186233409549 Origin: 72057594037968897 2025-07-08T11:57:59.317211Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 110, shardIdx: 72057594046678944:4, partId: 0 2025-07-08T11:57:59.317248Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 110:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 4 TabletID: 72075186233409549 Origin: 72057594037968897 2025-07-08T11:57:59.317259Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 110:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2025-07-08T11:57:59.317266Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 110:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 4 TabletID: 72075186233409549 Origin: 72057594037968897 2025-07-08T11:57:59.317306Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 110:0 2 -> 3 2025-07-08T11:57:59.317892Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 110 2025-07-08T11:57:59.318721Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 110 2025-07-08T11:57:59.319147Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 110:0, at schemeshard: 72057594046678944 2025-07-08T11:57:59.319251Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 110:0, at schemeshard: 72057594046678944 2025-07-08T11:57:59.319262Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateFileStore::TConfigureParts operationId# 110:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:57:59.319820Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 110:0 from tablet: 72057594046678944 to tablet: 72075186233409549 cookie: 72057594046678944:4 msg type: 275054593 2025-07-08T11:57:59.319864Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 110, partId: 0, tablet: 72075186233409549 2025-07-08T11:57:59.320867Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 110, tablet: 72075186233409549, partId: 0 2025-07-08T11:57:59.320900Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 110:0, at schemeshard: 72057594046678944, message: TxId: 110 Origin: 72075186233409549 Status: OK 2025-07-08T11:57:59.320906Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateFileStore::TConfigureParts operationId# 110:0 HandleReply TEvUpdateConfigResponse, at schemeshard: 72057594046678944 2025-07-08T11:57:59.320914Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 110:0 3 -> 128 2025-07-08T11:57:59.321551Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 110:0, at schemeshard: 72057594046678944 2025-07-08T11:57:59.321580Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 110:0, at schemeshard: 72057594046678944 2025-07-08T11:57:59.321585Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateFileStore::TPropose 
operationId# 110:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:57:59.321593Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 110 ready parts: 1/1 2025-07-08T11:57:59.321623Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 110 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:57:59.321876Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 110:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:110 msg type: 269090816 2025-07-08T11:57:59.321899Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 110, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 110 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 110 at step: 5000009 2025-07-08T11:57:59.321959Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:57:59.321975Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 110 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 8589936749 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:57:59.321982Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateFileStore::TPropose operationId# 110:0 HandleReply TEvOperationPlan, step: 5000009, at schemeshard: 72057594046678944 2025-07-08T11:57:59.321999Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 110:0 128 -> 240 2025-07-08T11:57:59.322055Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T11:57:59.322066Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 FAKE_COORDINATOR: Erasing txId 110 2025-07-08T11:57:59.322347Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:57:59.322353Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 110, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:57:59.322402Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 110, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-07-08T11:57:59.322420Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:57:59.322424Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 110, path id: 1 2025-07-08T11:57:59.322429Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 110, path id: 5 2025-07-08T11:57:59.322437Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 110:0, at schemeshard: 72057594046678944 2025-07-08T11:57:59.322443Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 110:0 ProgressState 2025-07-08T11:57:59.322453Z node 2 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#110:0 progress is 1/1 2025-07-08T11:57:59.322457Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 110 ready parts: 1/1 2025-07-08T11:57:59.322462Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#110:0 progress is 1/1 2025-07-08T11:57:59.322464Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 110 ready parts: 1/1 2025-07-08T11:57:59.322469Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 110, ready parts: 1/1, is published: false 2025-07-08T11:57:59.322474Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 110 ready parts: 1/1 2025-07-08T11:57:59.322479Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 110:0 2025-07-08T11:57:59.322485Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 110:0 2025-07-08T11:57:59.322509Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-07-08T11:57:59.322514Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 110, publications: 2, subscribers: 0 2025-07-08T11:57:59.322518Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 110, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2025-07-08T11:57:59.322521Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 110, [OwnerId: 72057594046678944, LocalPathId: 5], 2 2025-07-08T11:57:59.322704Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 110 2025-07-08T11:57:59.322714Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 110 2025-07-08T11:57:59.322718Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 110 2025-07-08T11:57:59.322723Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 110, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2025-07-08T11:57:59.322727Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-07-08T11:57:59.322849Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 110 2025-07-08T11:57:59.322858Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 110 2025-07-08T11:57:59.322862Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 110 2025-07-08T11:57:59.322866Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 110, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-07-08T11:57:59.322869Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-07-08T11:57:59.322877Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at 
schemeshard: 72057594046678944, txId: 110, subscribers: 0 2025-07-08T11:57:59.329300Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 110 2025-07-08T11:57:59.329657Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 110 TestModificationResult got TxId: 110, wait until txId: 110 TestWaitNotification wait txId: 110 2025-07-08T11:57:59.329730Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 110: send EvNotifyTxCompletion 2025-07-08T11:57:59.329737Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 110 2025-07-08T11:57:59.329822Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 110, at schemeshard: 72057594046678944 2025-07-08T11:57:59.329849Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 110: got EvNotifyTxCompletionResult 2025-07-08T11:57:59.329855Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 110: satisfy waiter [2:669:2619] TestWaitNotification: OK eventTxId 110 |62.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut >> TBlobStorageProxyTest::TestGetAndRangeGetManyBlobs >> TBlobStorageProxyTest::TestNormal >> TBlobStorageProxyTest::TestProxyPutSingleTimeout >> test.py::test[pg-tpch-q05-default.txt-ForceBlocks] [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Block >> TBlobStorageProxyTest::TestPersistence >> test.py::test[pg-tpch-q05-default.txt-Results] |62.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |62.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... results_accumulator.log} |62.9%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |62.9%| [TA] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3 >> TBlobStorageProxyTest::TestInFlightPuts [GOOD] >> TBlobStorageProxyTest::TestHugeCollectGarbage >> TBlobStorageProxyTest::TestPartialGetBlock [GOOD] >> TBlobStorageProxyTest::TestPartialGetMirror >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-create-streaming] [GOOD] >> TBlobStorageProxyTest::TestProxyGetSingleTimeout >> test.py::test[blocks-nested_optionals--ForceBlocks] [GOOD] >> test.py::test[blocks-nested_optionals--Results] >> TBlobStorageProxyTest::TestProxySimpleDiscoverNone [GOOD] >> TBlobStorageProxyTest::TestPutGetMany >> test.py::test[blocks-interval_mul--Results] [GOOD] >> test.py::test[column_group-hint_non_str_yson_fail--ForceBlocks] >> test.py::test[key_filter-string_with-default.txt-Results] [GOOD] >> test.py::test[limit-dynamic_limit_offset_overflow-default.txt-ForceBlocks] >> test.py::test[blocks-sort_two_desc--ForceBlocks] [GOOD] >> test.py::test[blocks-sort_two_desc--Results] >> TBlobStorageProxyTest::TestProxyPutInvalidSize [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure >> test.py::test[column_group-hint_non_str_yson_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_non_str_yson_fail--Results] [SKIPPED] >> TBlobStorageProxyTest::TestDoubleEmptyGet [GOOD] >> TBlobStorageProxyTest::TestCompactedGetMultipart [GOOD] >> TBlobStorageProxyTest::TestProxyPutSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock |63.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCompactedGetMultipart [GOOD] >> TBlobStorageProxyTest::TestSingleFailureMirror [GOOD] >> TBlobStorageProxyTest::TestVBlockVPutVGet >> TBlobStorageProxyTest::TestPartialGetMirror [GOOD] >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-modify-analytics] >> TBlobStorageProxyTest::TestHugeCollectGarbage [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe |63.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |63.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |63.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export >> TBlobStorageProxyTest::TestVPutVGetPersistence [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscover [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi >> TBlobStorageProxyTest::TestVBlockVPutVGet [GOOD] >> TBlobStorageProxyTest::TestCollectGarbagePersistence [GOOD] >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData |63.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPartialGetMirror [GOOD] |63.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestHugeCollectGarbage [GOOD] >> test.py::test[window-win_func_over_group_by_compl--ForceBlocks] [GOOD] >> test.py::test[window-win_func_over_group_by_compl--Results] >> TBlobStorageProxyTest::TestPersistence [GOOD] >> TBlobStorageProxyTest::TestPartialGetStripe >> TBlobStorageProxyTest::TestPutGetMany [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetBlock >> test.py::test[pg-tpcds-q94-default.txt-ForceBlocks] 
[GOOD] >> TBlobStorageProxyTest::TestProxyGetSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout |63.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVPutVGetPersistence [GOOD] >> test.py::test[blocks-nested_optionals--Results] [GOOD] >> test.py::test[blocks-tuple_type--ForceBlocks] >> test.py::test[select-shift_columns-default.txt-Results] [GOOD] >> test.py::test[select-table_funcs_spec-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVBlockVPutVGet [GOOD] Test command err: 2025-07-08T11:58:05.050119Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/43nv/002270/r3tmp/tmpczB2fd//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-07-08T11:58:05.056392Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 
BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe >> TBlobStorageProxyTest::TestNormal [GOOD] >> TBlobStorageProxyTest::TestNormalMirror >> TBlobStorageProxyTest::TestProxySimpleDiscover >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3 [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 |63.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetMany [GOOD] >> TBlobStorageProxyTest::TestPartialGetStripe [GOOD] >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData [GOOD] >> test.py::test[file-file_list_simple--ForceBlocks] [GOOD] |63.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/pytest >> test.py::test[column_group-hint_non_str_yson_fail--Results] [SKIPPED] >> test.py::test[aggregate-group_by_expr_columns_reuse-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_expr_mul_col--Results] >> TBlobStorageProxyTest::TestEmptyDiscover >> TBlobStorageProxyTest::TestQuadrupleGroups [GOOD] >> TBlobStorageProxyTest::TestSingleFailure >> test.py::test[blocks-sort_two_desc--Results] [GOOD] >> test.py::test[limit-limit--Results] [GOOD] >> test.py::test[lineage-select_field-default.txt-Results] [SKIPPED] >> test.py::test[blocks-top_sort_two_desc--ForceBlocks] |63.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPartialGetStripe [GOOD] |63.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData [GOOD] >> test.py::test[weak_field-weak_field_data--Results] [GOOD] >> test.py::test[lineage-window_many-default.txt-Results] [SKIPPED] |63.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_filestore_reboots/unittest >> test.py::test[weak_field-weak_field_join--Results] >> TBlobStorageProxyTest::TestVPutVGet >> test.py::test[lineage-with_inline-default.txt-Results] [SKIPPED] >> TBlobStorageProxyTest::TestNormalMirror [GOOD] |63.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part8/pytest >> test.py::test[pg-tpcds-q94-default.txt-ForceBlocks] [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe [GOOD] >> KqpPragma::ResetPerQuery >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout [GOOD] >> test.py::test[pg-tpch-q05-default.txt-Results] [GOOD] >> test.py::test[produce-discard_process_with_lambda-default.txt-ForceBlocks] >> 
TBlobStorageProxyTest::TestProxySimpleDiscover [GOOD] >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 [GOOD] |63.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestNormalMirror [GOOD] |63.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part7/pytest >> test.py::test[file-file_list_simple--ForceBlocks] [GOOD] >> KqpScripting::StreamScanQuery >> KqpYql::UpdateBadType >> TBlobStorageProxyTest::TestEmptyDiscover [GOOD] >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi |63.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe [GOOD] |63.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestVPutVGet [GOOD] >> TBlobStorageProxyTest::TestVPutVGetLimit |63.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |63.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |63.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |63.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 [GOOD] >> test.py::test[window-win_func_over_group_by--Results] [GOOD] >> test_stop.py::TestStop::test_stop_query[v1-streaming] >> KqpScripting::ScriptValidate >> KqpYql::InsertCVList+useSink >> KqpYql::BinaryJsonOffsetNormal |63.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/pytest >> test.py::test[lineage-with_inline-default.txt-Results] [SKIPPED] >> KqpPragma::ResetPerQuery [GOOD] >> KqpPragma::Warning >> test.py::test[limit-dynamic_limit_offset_overflow-default.txt-ForceBlocks] [GOOD] >> KqpScripting::StreamExecuteYqlScriptScan |63.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |63.1%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |63.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut >> TBlobStorageProxyTest::TestSingleFailure [GOOD] |63.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe [GOOD] >> KqpYql::UpdateBadType [GOOD] >> KqpScripting::StreamScanQuery [GOOD] >> KqpScripting::SyncExecuteYqlScriptSeveralQueries ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestSingleFailure [GOOD] Test command err: 2025-07-08T11:58:11.857880Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/43nv/0022fb/r3tmp/tmpBxhfqp//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-07-08T11:58:11.870967Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR |63.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |63.1%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow >> TBlobStorageProxyTest::TestProxyRestoreOnGetStripe [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 |63.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow >> KqpYql::InsertCVList+useSink [GOOD] >> KqpYql::InsertCVList-useSink >> KqpScripting::ScriptValidate [GOOD] >> KqpScripting::ScriptStats >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi [GOOD] >> TBlobStorageProxyTest::TestVPutVGetLimit [GOOD] |63.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant >> Describe::Statistics |63.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |63.1%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |63.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |63.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |63.2%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut >> KqpScripting::StreamExecuteYqlScriptScan [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce |63.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock [GOOD] >> KqpPragma::Warning [GOOD] >> KqpYql::BinaryJsonOffsetNormal [GOOD] >> KqpYql::Closure |63.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |63.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |63.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVPutVGetLimit [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::UpdateBadType [GOOD] Test command err: Trying to start YDB, gRPC: 5906, MsgBus: 23266 2025-07-08T11:58:12.458068Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679163731021183:2072];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:12.458161Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000d7e/r3tmp/tmp8rt2KW/pdisk_1.dat 2025-07-08T11:58:12.561104Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:58:12.561598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:12.561616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:12.568299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5906, node 1 2025-07-08T11:58:12.612236Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:58:12.612250Z node 1 :NET_CLASSIFIER WARN: 
will try to initialize from file: (empty maybe) 2025-07-08T11:58:12.612251Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:58:12.612290Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23266 TClient is connected to server localhost:23266 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:58:12.691352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:12.705690Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:58:12.765614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:12.808572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:12.852847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:12.874136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:58:12.981898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:58:12.994005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.016673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.053456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.072374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.097250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.155581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480
: Error: Type annotation, code: 1030
:4:26: Error: At function: KiUpdateTable!
:3:20: Error: Failed to convert type: Struct<'Amount':String?> to Struct<'Amount':Uint64?>
:3:20: Error: Failed to convert 'Amount': Optional<String> to Optional<Uint64>
:3:20: Error: Row type mismatch for table: db.[/Root/Test] >> test.py::test[join-anyjoin_common_nodata_keys--ForceBlocks] [GOOD] >> test.py::test[join-anyjoin_common_nodata_keys--Results] >> TExportToS3Tests::CorruptedDyNumber >> TExportToS3Tests::RebootDuringCompletion >> KqpScripting::SyncExecuteYqlScriptSeveralQueries [GOOD] |63.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/pytest >> test.py::test[window-win_func_over_group_by--Results] [GOOD] |63.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut >> test.py::test[pg-tpch-q04-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q15-default.txt-Results] >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs |63.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi [GOOD] |63.2%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |63.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut >> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed >> TExportToS3Tests::UidAsIdempotencyKey ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock [GOOD] Test command err: 2025-07-08T11:58:08.466568Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/43nv/002250/r3tmp/tmp60y225//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 2 2025-07-08T11:58:08.466979Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:1:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 
LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-07-08T11:58:11.593724Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/43nv/002250/r3tmp/tmp60y225//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 3 2025-07-08T11:58:11.593770Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:2:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# 
true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-07-08T11:58:11.869681Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/43nv/002250/r3tmp/tmp60y225//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 3 2025-07-08T11:58:11.870140Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:2:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-07-08T11:58:13.169250Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/43nv/002250/r3tmp/tmp60y225//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 3 2025-07-08T11:58:13.169423Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:2:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] 
StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-07-08T11:58:13.445072Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/43nv/002250/r3tmp/tmp60y225//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 3 2025-07-08T11:58:13.446638Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:2:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 
HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> test.py::test[window-full/session--Results] [GOOD] >> test.py::test[window-full/session_aliases--Results] |63.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part0/pytest >> test.py::test[limit-dynamic_limit_offset_overflow-default.txt-ForceBlocks] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpPragma::Warning [GOOD] Test command err: Trying to start YDB, gRPC: 14740, MsgBus: 19142 2025-07-08T11:58:11.510575Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679155801706067:2220];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:11.512117Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000bde/r3tmp/tmpUcS64R/pdisk_1.dat 2025-07-08T11:58:11.752191Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:58:11.759751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:11.759780Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:11.765437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14740, node 1 2025-07-08T11:58:11.849172Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:58:11.849182Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:58:11.849184Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:58:11.849225Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19142 TClient is connected to server localhost:19142 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-07-08T11:58:11.945774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:11.961220Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:58:12.009580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:12.087281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:12.138342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T11:58:12.155740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T11:58:12.236346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:58:12.247001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:58:12.259849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T11:58:12.274371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T11:58:12.294057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T11:58:12.307712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T11:58:12.326285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T11:58:12.511611Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:58:12.633990Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7524679160096675699:2455], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:2:50: Error: At function: AssumeColumnOrderPartial
:2:20: Error: At function: Aggregate /lib/yql/aggregate.yqls:648:18: Error: At function: AggregationTraits /lib/yql/aggregate.yqls:60:31: Error: At function: AggrCountInit
:2:20: Error: At function: PersistableRepr
:2:26: Error: At function: Member
:2:26: Error: Member not found: _yql_partition_id 2025-07-08T11:58:12.634865Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDczMGU2YS1iYTUyYWNlLTg4MWI1MTA1LTRjNDk5ZWM1, ActorId: [1:7524679160096675652:2447], ActorState: ExecuteState, TraceId: 01jzmyd5mn7wq9t3767xhbe9mf, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 18774, MsgBus: 22277 2025-07-08T11:58:13.105937Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679165520466559:2071];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:13.107684Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000bde/r3tmp/tmpL2hlvw/pdisk_1.dat 2025-07-08T11:58:13.124262Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18774, node 2 2025-07-08T11:58:13.156268Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:58:13.156278Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:58:13.156281Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:58:13.156339Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22277 2025-07-08T11:58:13.209319Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:13.209345Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:13.212299Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22277 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:58:13.241456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:58:13.242726Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:58:13.248920Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:13.261478Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T11:58:13.278326Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.288638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:13.604848Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.622945Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.641107Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.651131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.664332Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.681929Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.745738Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T11:58:14.113566Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KqpYql::InsertCVList-useSink [GOOD] >> test.py::test[blocks-tuple_type--ForceBlocks] [GOOD] >> test.py::test[blocks-tuple_type--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SyncExecuteYqlScriptSeveralQueries [GOOD] Test command err: Trying to start YDB, gRPC: 9719, MsgBus: 1993 2025-07-08T11:58:12.297682Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679163102188602:2176];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:12.343580Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000be4/r3tmp/tmpWTTLkJ/pdisk_1.dat 2025-07-08T11:58:12.382127Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9719, node 1 2025-07-08T11:58:12.416728Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:58:12.416740Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:58:12.416742Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:58:12.416787Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1993 2025-07-08T11:58:12.451116Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:12.451145Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:12.451634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1993 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:58:12.509632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:12.512634Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-07-08T11:58:12.585666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:12.622510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:12.661899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:12.681267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:58:12.853391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T11:58:12.919658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-07-08T11:58:12.933300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-07-08T11:58:12.943036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-07-08T11:58:12.962959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-07-08T11:58:12.980318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-07-08T11:58:12.995964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.242836Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751975893276, txId: 281474976710670] shutting down 2025-07-08T11:58:13.293259Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Trying to start YDB, gRPC: 23686, MsgBus: 6198 2025-07-08T11:58:13.717214Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679164068863201:2194];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:13.719421Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000be4/r3tmp/tmpk1zGV3/pdisk_1.dat 2025-07-08T11:58:13.744779Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23686, node 2 2025-07-08T11:58:13.769298Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:58:13.769310Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:58:13.769313Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:58:13.769355Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6198 2025-07-08T11:58:13.827039Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:13.827065Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:13.828075Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6198 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:58:13.853427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:13.855030Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:58:13.866119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:13.882573Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:13.957686Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:13.984251Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:58:14.101782Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:58:14.122631Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:58:14.136150Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T11:58:14.146902Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T11:58:14.162402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T11:58:14.226114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T11:58:14.252913Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 >> KqpScripting::ScriptStats [GOOD] >> test.py::test[select-table_funcs_spec-default.txt-Results] [GOOD] >> test.py::test[select-unlabeled--Results] >> TExportToS3Tests::RebootDuringCompletion [GOOD] >> TExportToS3Tests::RebootDuringAbortion >> TExportToS3Tests::CorruptedDyNumber [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce [GOOD] >> TExportToS3Tests::CompletedExportEndTime >> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed >> KqpYql::Closure [GOOD] >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentExport >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi [GOOD] >> TExportToS3Tests::UidAsIdempotencyKey [GOOD] >> TExportToS3Tests::UserSID >> test.py::test[blocks-top_sort_two_desc--ForceBlocks] [GOOD] >> test.py::test[blocks-top_sort_two_desc--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::InsertCVList-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 25725, MsgBus: 8019 2025-07-08T11:58:12.876683Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679162160508301:2071];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:12.876705Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000d70/r3tmp/tmpFyWXOZ/pdisk_1.dat 2025-07-08T11:58:12.946375Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25725, node 1 2025-07-08T11:58:12.979513Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-07-08T11:58:12.979539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:12.984506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:58:13.011532Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:58:13.011545Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:58:13.011547Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:58:13.011584Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8019 TClient is connected to server localhost:8019 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:58:13.092074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:13.098454Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-07-08T11:58:13.110779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:13.150510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:13.190070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:13.210133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:58:13.376036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.399298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.410878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.426213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.439609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.458523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.485953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.694454Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=3; 2025-07-08T11:58:13.696451Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037914 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-07-08T11:58:13.696483Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037914 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-07-08T11:58:13.696538Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7524679166455478101:2459], Table: `/Root/Test` ([72057594046644480:9:1]), SessionActorId: [1:7524679166455478078:2459]Got CONSTRAINT VIOLATION for table `/Root/Test`. ShardID=72075186224037914, Sink=[1:7524679166455478101:2459].{
: Error: Conflict with existing key., code: 2012 } 2025-07-08T11:58:13.696612Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7524679166455478094:2459], SessionActorId: [1:7524679166455478078:2459], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/Test`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:7524679166455478078:2459]. isRollback=0 2025-07-08T11:58:13.696668Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmYyY2M3N2MtN2U1YTlhNWYtY2E3NzhjMTUtZDI2MDIwMTg=, ActorId: [1:7524679166455478078:2459], ActorState: ExecuteState, TraceId: 01jzmyd6ms5h6s1ksdkdzhbjb9, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7524679166455478095:2459] from: [1:7524679166455478094:2459] 2025-07-08T11:58:13.696753Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7524679166455478095:2459] TxId: 281474976710670. Ctx: { TraceId: 01jzmyd6ms5h6s1ksdkdzhbjb9, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmYyY2M3N2MtN2U1YTlhNWYtY2E3NzhjMTUtZDI2MDIwMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/Test`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } 2025-07-08T11:58:13.696800Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmYyY2M3N2MtN2U1YTlhNWYtY2E3NzhjMTUtZDI2MDIwMTg=, ActorId: [1:7524679166455478078:2459], ActorState: ExecuteState, TraceId: 01jzmyd6ms5h6s1ksdkdzhbjb9, Create QueryResponse for error on request, msg:
: Error: Execution, code: 1060
: Error: Constraint violated. Table: `/Root/Test`., code: 2012
: Error: Conflict with existing key., code: 2012 Trying to start YDB, gRPC: 28723, MsgBus: 21800 2025-07-08T11:58:14.192138Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679168484215923:2093];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:14.192992Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000d70/r3tmp/tmp2OLyS8/pdisk_1.dat 2025-07-08T11:58:14.220824Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28723, node 2 2025-07-08T11:58:14.243675Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:58:14.243691Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:58:14.243693Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:58:14.243741Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21800 2025-07-08T11:58:14.293213Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:14.293243Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:14.294940Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21800 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:58:14.330153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:14.394001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:14.418411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-07-08T11:58:14.453810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T11:58:14.470066Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:14.547431Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:58:14.557266Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:58:14.565647Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T11:58:14.582175Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T11:58:14.593452Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T11:58:14.608386Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T11:58:14.630537Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T11:58:14.894004Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7524679168484218353:2459], TxId: 281474976715671, task: 1. Ctx: { TraceId : 01jzmyd7sx6zrkv5zabnjy9td4. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NWFkOTgzMS0yZDY5ZWExOS01N2M1ZTQwMi0yYzhjOWQ1OA==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : . }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-07-08T11:58:14.894122Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7524679168484218354:2460], TxId: 281474976715671, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=NWFkOTgzMS0yZDY5ZWExOS01N2M1ZTQwMi0yYzhjOWQ1OA==. CustomerSuppliedId : . TraceId : 01jzmyd7sx6zrkv5zabnjy9td4. CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. Handle abort execution event from: [2:7524679168484218350:2450], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-07-08T11:58:14.894170Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NWFkOTgzMS0yZDY5ZWExOS01N2M1ZTQwMi0yYzhjOWQ1OA==, ActorId: [2:7524679168484218321:2450], ActorState: ExecuteState, TraceId: 01jzmyd7sx6zrkv5zabnjy9td4, Create QueryResponse for error on request, msg:
: Error: Execution, code: 1060
: Error: Duplicated keys found., code: 2012 >> test.py::test[produce-discard_process_with_lambda-default.txt-ForceBlocks] [GOOD] >> TExportToS3Tests::DropSourceTableBeforeTransferring |63.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScriptStats [GOOD] Test command err: Trying to start YDB, gRPC: 4562, MsgBus: 32051 2025-07-08T11:58:12.902441Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679163551611877:2072];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:12.904392Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000d7a/r3tmp/tmpJ1Ha2k/pdisk_1.dat 2025-07-08T11:58:12.982389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:12.982410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:12.986759Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:58:12.997156Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4562, node 1 2025-07-08T11:58:13.033192Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:58:13.033206Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:58:13.033208Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:58:13.033250Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32051 TClient is connected to server localhost:32051 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:58:13.161733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:58:13.169496Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:58:13.249879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:13.317608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:13.349535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:13.380348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:13.516231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.530412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.583088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.600798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.616796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.630055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.645250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.880882Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Trying to start YDB, gRPC: 8596, MsgBus: 28159 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000d7a/r3tmp/tmpxpGQJj/pdisk_1.dat 2025-07-08T11:58:14.401085Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T11:58:14.405332Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8596, node 2 2025-07-08T11:58:14.423808Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:58:14.423831Z node 2 :NET_CLASSIFIER WARN: will try to initialize from 
file: (empty maybe) 2025-07-08T11:58:14.423832Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:58:14.423871Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28159 TClient is connected to server localhost:28159 2025-07-08T11:58:14.485695Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:14.485723Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-07-08T11:58:14.486768Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:58:14.495678Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:14.505647Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:58:14.518981Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:14.535462Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:14.566095Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:14.586134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:58:14.792081Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:58:14.803212Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:58:14.826526Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T11:58:14.845610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T11:58:14.851837Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T11:58:14.914201Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T11:58:14.937491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T11:58:15.215712Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T11:58:15.310049Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751975895341, txId: 281474976715673] shutting down 2025-07-08T11:58:15.380481Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:58:15.390901Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751975895432, txId: 281474976715677] shutting down 2025-07-08T11:58:15.443017Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751975895441, txId: 281474976715681] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 4771, MsgBus: 9119 2025-07-08T11:58:13.282913Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679165602907791:2175];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000d6e/r3tmp/tmptkt8TB/pdisk_1.dat 2025-07-08T11:58:13.333262Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T11:58:13.375024Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4771, node 1 2025-07-08T11:58:13.429737Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:13.429761Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:13.437283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:58:13.457166Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:58:13.457178Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:58:13.457180Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:58:13.457223Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9119 TClient is connected to server localhost:9119 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:58:13.530269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:13.537255Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:58:13.542132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:13.610372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T11:58:13.651745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.680066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:58:13.784247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.799419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.813107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.824765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.842224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.899099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.916491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T11:58:14.181845Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751975894200, txId: 281474976715670] shutting down Trying to start YDB, gRPC: 21371, MsgBus: 13760 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000d6e/r3tmp/tmpEuWgEH/pdisk_1.dat 2025-07-08T11:58:14.633248Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T11:58:14.645921Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21371, node 2 2025-07-08T11:58:14.661491Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:58:14.661504Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:58:14.661506Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:58:14.661544Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13760 TClient is connected to server localhost:13760 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:58:14.729743Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:14.729771Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:14.730153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:58:14.731352Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T11:58:14.737041Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:58:14.745653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:14.767675Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:14.795084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:14.810254Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:58:15.121698Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:58:15.129148Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:58:15.148699Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T11:58:15.162409Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T11:58:15.173455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T11:58:15.184064Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T11:58:15.242990Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T11:58:15.450293Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751975895488, txId: 281474976715670] shutting down 2025-07-08T11:58:15.486890Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751975895523, txId: 281474976715672] shutting down 2025-07-08T11:58:15.516774Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751975895558, txId: 281474976715674] shutting down >> TExportToS3Tests::ExportStartTime |63.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi [GOOD] >> TExportToS3Tests::UserSID [GOOD] >> TExportToS3Tests::TablePermissions >> test.py::test[window-win_func_over_group_by_compl--Results] [GOOD] >> test.py::test[window-win_func_part_by_expr--ForceBlocks] >> TxUsage::TwoSessionOneConsumer ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpYql::Closure [GOOD] Test command err: Trying to start YDB, gRPC: 21474, MsgBus: 3899 2025-07-08T11:58:12.997303Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679162997182106:2226];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:13.079665Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000d74/r3tmp/tmpnIIP65/pdisk_1.dat 2025-07-08T11:58:13.121620Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21474, node 1 2025-07-08T11:58:13.170837Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:58:13.170849Z node 1 :NET_CLASSIFIER WARN: 
will try to initialize from file: (empty maybe) 2025-07-08T11:58:13.170851Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:58:13.170885Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T11:58:13.185288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:13.185313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:13.189413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3899 TClient is connected to server localhost:3899 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:58:13.281680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:13.289433Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:58:13.302194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:13.375277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:13.428344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:13.462119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:58:13.545353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.584833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.596581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.618042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.635276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.707115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.785966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T11:58:13.988720Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Trying to start YDB, gRPC: 61295, MsgBus: 1705 2025-07-08T11:58:14.721041Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679171149632437:2241];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000d74/r3tmp/tmpcuOA0L/pdisk_1.dat 2025-07-08T11:58:14.722349Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T11:58:14.741304Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61295, node 2 2025-07-08T11:58:14.761216Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:58:14.761227Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:58:14.761228Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:58:14.761264Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1705 TClient is connected to server localhost:1705 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T11:58:14.822702Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:14.822736Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:14.822979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:58:14.823941Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:58:14.828626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:14.847244Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:14.884931Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:14.898891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:58:15.211755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:58:15.227118Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:58:15.293134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T11:58:15.308777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T11:58:15.323394Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T11:58:15.338254Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T11:58:15.350201Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 >> TExportToS3Tests::ShouldSucceedOnConcurrentExport [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentImport >> TExportToS3Tests::RebootDuringAbortion [GOOD] >> TExportToS3Tests::ShouldExcludeBackupTableFromStats >> TBlobStorageProxyTest::TestProxyRestoreOnGetBlock [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror >> TExportToS3Tests::CompletedExportEndTime [GOOD] >> TExportToS3Tests::Checksums >> BasicUsage::ConnectToYDB >> TExportToS3Tests::ShouldSucceedOnSingleShardTable >> YdbIndexTable::MultiShardTableOneIndex >> TxUsage::WriteToTopic_Demo_11 >> YdbIndexTable::MultiShardTableUniqAndNonUniqIndex >> TExportToS3Tests::ExportStartTime [GOOD] >> TExportToS3Tests::TablePermissions [GOOD] >> TExportToS3Tests::ExportPartitioningSettings >> TExportToS3Tests::DropSourceTableBeforeTransferring [GOOD] >> TExportToS3Tests::DropCopiesBeforeTransferring1 >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings >> TExportToS3Tests::CancelUponTransferringSingleTableShouldSucceed [GOOD] >> TExportToS3Tests::CheckItemProgress >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateResourceSessions [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestStopConsuming [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionState [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionStateAfterAllResourceAllocated [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD] >> TSchemeShardServerLess::Fake [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentImport [GOOD] >> TExportToS3Tests::Checksums [GOOD] >> TExportToS3Tests::ChecksumsWithCompression >> test.py::test[blocks-tuple_type--Results] [GOOD] >> TExportToS3Tests::ShouldSucceedOnSingleShardTable [GOOD] >> TExportToS3Tests::ShouldSucceedOnMultiShardTable >> TExportToS3Tests::DropCopiesBeforeTransferring1 [GOOD] >> TExportToS3Tests::DropCopiesBeforeTransferring2 >> 
test.py::test[column_group-hint_anon_groups-single-ForceBlocks] [SKIPPED] >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings [GOOD] >> TExportToS3Tests::ShouldPreserveIncrBackupFlag >> TExportToS3Tests::ExportPartitioningSettings [GOOD] >> test.py::test[column_group-hint_anon_groups-single-Results] [SKIPPED] >> test.py::test[column_order-select_where-default.txt-ForceBlocks] >> TExportToS3Tests::ExportIndexTablePartitioningSettings >> TExportToS3Tests::ChecksumsWithCompression [GOOD] >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::TablePermissions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:58:15.283481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:58:15.283505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:15.283510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:58:15.283515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:58:15.283529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:58:15.283533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:58:15.283542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:15.283565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:58:15.283630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:58:15.295621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:58:15.295637Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:58:15.299633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:58:15.299682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:58:15.299711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:58:15.303006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:58:15.303059Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:58:15.303150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:15.303395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 
1], at schemeshard: 72057594046678944 2025-07-08T11:58:15.304400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:15.304449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:58:15.304685Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:15.304696Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:15.304715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:58:15.304739Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:15.304746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:58:15.304804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:58:15.307276Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:58:15.323981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:58:15.324060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:15.324121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:58:15.324171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:58:15.324181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:15.326986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:15.327019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:58:15.327075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:15.327085Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:58:15.327089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:58:15.327097Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:58:15.327528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:15.327539Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 
ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:58:15.327543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:58:15.327825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:15.327835Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:15.327840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:15.327847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:58:15.328386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:58:15.328707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:58:15.328746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:58:15.328908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:15.328930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:58:15.328936Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:15.329028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:58:15.329036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:15.329065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:58:15.329076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:58:15.329429Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:15.329436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:15.329479Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:15.329484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 
2025-07-08T11:58:15.329493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:15.329500Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:58:15.329510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:15.329514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:15.329519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:15.329521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:15.329525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:58:15.329529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:15.329534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:58:15.329538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:58:15.329548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:58:15.329554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:58:15.329558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:58:15.329986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:58:15.330003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
X_SCHEMESHARD INFO: Change state for txid 281474976710759:0 3 -> 128 2025-07-08T11:58:17.022220Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.022256Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.022263Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:58:17.022275Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710759 ready parts: 1/1 2025-07-08T11:58:17.022323Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409547 Flags: 2 } ExecLevel: 0 TxId: 281474976710759 MinStep: 5000005 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:58:17.022731Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2025-07-08T11:58:17.022763Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710759 at step: 5000005 2025-07-08T11:58:17.022902Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:17.022925Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 12884904037 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:58:17.022933Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000005, at schemeshard: 72057594046678944 2025-07-08T11:58:17.022955Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2025-07-08T11:58:17.022977Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:27496 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A1CB8A14-4CAE-4620-8EB0-EAC805252349 amz-sdk-request: attempt=1 content-length: 73 content-md5: q/ySd5GvS6I/qOVxS/4Thg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 2025-07-08T11:58:17.030245Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:17.030260Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-07-08T11:58:17.030335Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:17.030340Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:205:2207], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 4 2025-07-08T11:58:17.030443Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.030454Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:58:17.030621Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-07-08T11:58:17.030643Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-07-08T11:58:17.030647Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2025-07-08T11:58:17.030653Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-07-08T11:58:17.030659Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-07-08T11:58:17.030677Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 281474976710759 2025-07-08T11:58:17.031298Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:27496 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7823288D-6404-4414-B331-FA40160D7A31 amz-sdk-request: attempt=1 content-length: 137 content-md5: WeIr3D5bqIjvqMGEjx2JrA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /permissions.pb / / 137 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:27496 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 8B9A48BB-075C-4ECE-BD4C-F0FCD9E4A972 amz-sdk-request: attempt=1 content-length: 355 content-md5: 4DhJNWgTpoG3PVvZ0uCHUA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb / / 355 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:27496 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 72D4868A-434A-4E66-9893-878D3EAF6E84 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / / 0 2025-07-08T11:58:17.053385Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 455 RawX2: 12884904312 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-07-08T11:58:17.053412Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2025-07-08T11:58:17.053435Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 455 RawX2: 12884904312 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-07-08T11:58:17.053450Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 455 RawX2: 12884904312 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-07-08T11:58:17.053464Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:17.053468Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.053472Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-07-08T11:58:17.053479Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-07-08T11:58:17.053523Z node 3 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:17.054102Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.054186Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.054194Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-07-08T11:58:17.054210Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-07-08T11:58:17.054214Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-07-08T11:58:17.054219Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-07-08T11:58:17.054221Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-07-08T11:58:17.054226Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2025-07-08T11:58:17.054239Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:130:2155] message: TxId: 281474976710759 2025-07-08T11:58:17.054245Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-07-08T11:58:17.054250Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-07-08T11:58:17.054254Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2025-07-08T11:58:17.054279Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-07-08T11:58:17.054739Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-07-08T11:58:17.054753Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2025-07-08T11:58:17.055104Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-07-08T11:58:17.055120Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:485:2446] TestWaitNotification: OK eventTxId 103 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure [GOOD] Test command err: 2025-07-08T11:58:11.046809Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/43nv/002295/r3tmp/tmpklxNCi//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-07-08T11:58:11.048147Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 
LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-07-08T11:58:12.393414Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/43nv/002295/r3tmp/tmpklxNCi//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 2 2025-07-08T11:58:12.396096Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:1:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-07-08T11:58:13.731114Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/43nv/002295/r3tmp/tmpklxNCi//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 3 2025-07-08T11:58:13.737864Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:2:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] 
StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-07-08T11:58:14.989985Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 4 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/43nv/002295/r3tmp/tmpklxNCi//vdisk_bad_3/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 4 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 4 2025-07-08T11:58:14.997127Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:3:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 
HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 4 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-07-08T11:58:16.158431Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 5 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/43nv/002295/r3tmp/tmpklxNCi//vdisk_bad_4/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 5 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 5 2025-07-08T11:58:16.158830Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:4:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 
TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 5 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-07-08T11:58:16.587088Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 6 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/43nv/002295/r3tmp/tmpklxNCi//vdisk_bad_5/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 6 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 6 2025-07-08T11:58:16.587190Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:5:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 
0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 6 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR |63.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless/unittest |63.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD] |63.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part0/pytest >> test.py::test[produce-discard_process_with_lambda-default.txt-ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_join--Results] [GOOD] >> test.py::test[weak_field-weak_field_strict--Results] >> TIterator::Basics [GOOD] >> TIterator::External [GOOD] >> TIterator::Single |63.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::Fake [GOOD] >> TExportToS3Tests::ShouldPreserveIncrBackupFlag [GOOD] >> TExportToS3Tests::ShouldRestartOnScanErrors >> TIterator::Single [GOOD] >> TIterator::SingleReverse >> TExportToS3Tests::ShouldSucceedOnMultiShardTable [GOOD] >> TExportToS3Tests::ShouldSucceedOnManyTables >> TIterator::SingleReverse [GOOD] >> TIterator::Mixed >> TExportToS3Tests::ExportIndexTablePartitioningSettings [GOOD] >> TExportToS3Tests::EnableChecksumsPersistance ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldSucceedOnConcurrentImport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:58:15.235306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:58:15.235329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:15.235335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:58:15.235339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:58:15.235350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:58:15.235354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing 
config: type TxSplitTablePartition, limit 10000 2025-07-08T11:58:15.235361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:15.235379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:58:15.235445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:58:15.247026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:58:15.247043Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:58:15.250520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:58:15.250570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:58:15.250593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:58:15.251838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:58:15.251882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:58:15.251959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:15.252093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:58:15.252767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:15.252805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:58:15.253013Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:15.253024Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:15.253040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:58:15.253045Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:15.253051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:58:15.253072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:58:15.254099Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:58:15.271781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:58:15.271845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:15.271892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 
2025-07-08T11:58:15.271940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:58:15.271951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:15.274377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:15.274404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:58:15.274450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:15.274459Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:58:15.274464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:58:15.274468Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:58:15.278753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:15.278777Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:58:15.278784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:58:15.279770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:15.279782Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:15.279786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:15.279791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:58:15.280266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:58:15.280666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:58:15.280702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:58:15.280863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:15.280887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 
MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:58:15.280896Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:15.280986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:58:15.280995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:15.281023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:58:15.281032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:58:15.281451Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:15.281459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:15.281496Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:15.281501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:58:15.281511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:15.281516Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:58:15.281525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:15.281529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:15.281534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:15.281537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:15.281541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:58:15.281545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:15.281549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:58:15.281554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:58:15.281564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:58:15.281569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:58:15.281573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:58:15.281984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:58:15.281998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 025-07-08T11:58:17.745789Z node 3 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-07-08T11:58:17.745818Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409549, shardIdx: 72057594046678944:4, operationId: 281474976710765:0, left await: 0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.745823Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710765:0 3 -> 128 2025-07-08T11:58:17.746191Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.746224Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.746229Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710765:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:58:17.746239Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710765 ready parts: 1/1 2025-07-08T11:58:17.746263Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 281474976710765 MinStep: 5000010 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:58:17.746498Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710765:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710765 msg type: 269090816 2025-07-08T11:58:17.746522Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710765, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710765 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710765 at step: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72075186233409549 for txId: 281474976710765 at step: 5000010 2025-07-08T11:58:17.746585Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000010, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:17.746603Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710765 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 12884904037 } } Step: 5000010 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:58:17.746609Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710765:0 HandleReply TEvOperationPlan, stepId: 5000010, at schemeshard: 72057594046678944 2025-07-08T11:58:17.746628Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710765:0 128 -> 129 2025-07-08T11:58:17.746659Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 REQUEST: PUT /Backup2/metadata.json HTTP/1.1 HEADERS: Host: localhost:16122 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 93D0A23B-8B49-44CA-AB7B-1B74B6EEBEAA amz-sdk-request: attempt=1 content-length: 73 
content-md5: 5UnTthDw7DG9u0TfCJZu+w== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /Backup2/metadata.json / / 73 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000010 2025-07-08T11:58:17.759035Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:17.759052Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710765, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-07-08T11:58:17.759125Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:17.759131Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:205:2207], at schemeshard: 72057594046678944, txId: 281474976710765, path id: 7 2025-07-08T11:58:17.759238Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.759248Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710765:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 281474976710765 2025-07-08T11:58:17.759404Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710765 2025-07-08T11:58:17.759415Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710765 2025-07-08T11:58:17.759419Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710765 2025-07-08T11:58:17.759424Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710765, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 3 2025-07-08T11:58:17.759431Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2025-07-08T11:58:17.759448Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710765, ready parts: 0/1, is published: true 2025-07-08T11:58:17.760088Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710765 REQUEST: PUT /Backup2/scheme.pb HTTP/1.1 HEADERS: Host: localhost:16122 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 54CA7E60-1A0B-4617-A4EA-2C9CB52B104B amz-sdk-request: attempt=1 content-length: 355 content-md5: 4DhJNWgTpoG3PVvZ0uCHUA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /Backup2/scheme.pb / / 355 REQUEST: PUT /Backup2/data_00.csv HTTP/1.1 HEADERS: Host: localhost:16122 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9AC7AE65-434B-47D4-A5AB-86519EB6476C amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 
S3_MOCK::HttpServeWrite: /Backup2/data_00.csv / / 0 2025-07-08T11:58:17.780578Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 816 RawX2: 12884904639 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-07-08T11:58:17.780608Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710765, tablet: 72075186233409549, partId: 0 2025-07-08T11:58:17.780637Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944, message: Source { RawX1: 816 RawX2: 12884904639 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-07-08T11:58:17.780653Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710765:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 816 RawX2: 12884904639 } Origin: 72075186233409549 State: 2 TxId: 281474976710765 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-07-08T11:58:17.780669Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710765:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:17.780674Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.780680Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710765:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-07-08T11:58:17.780688Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710765:0 129 -> 240 2025-07-08T11:58:17.780742Z node 3 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 281474976710765:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:17.781333Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.781446Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710765:0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.781457Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710765:0 ProgressState 2025-07-08T11:58:17.781472Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710765:0 progress is 1/1 2025-07-08T11:58:17.781477Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2025-07-08T11:58:17.781482Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710765:0 progress is 1/1 2025-07-08T11:58:17.781488Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2025-07-08T11:58:17.781493Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710765, 
ready parts: 1/1, is published: true 2025-07-08T11:58:17.781508Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:130:2155] message: TxId: 281474976710765 2025-07-08T11:58:17.781517Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2025-07-08T11:58:17.781522Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710765:0 2025-07-08T11:58:17.781527Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710765:0 2025-07-08T11:58:17.781557Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-07-08T11:58:17.781983Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710765 2025-07-08T11:58:17.781997Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710765 2025-07-08T11:58:17.782356Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-07-08T11:58:17.782368Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:846:2773] TestWaitNotification: OK eventTxId 104 |63.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |63.4%| [LD] {RESULT} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundSnapshot [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotToRegular >> TExportToS3Tests::DropCopiesBeforeTransferring2 [GOOD] >> TExportToS3Tests::DisableAutoDropping >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotToRegular [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen1 >> test.py::test[blocks-top_sort_two_desc--Results] [GOOD] |63.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen1 [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionToRegular >> test.py::test[aggregate-group_by_expr_mul_col--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_flatten_columns-default.txt-Results] >> TExportToS3Tests::CheckItemProgress [GOOD] >> TExportToS3Tests::CancelledExportEndTime >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionToRegular [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen2 [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotPriorityByTime >> TExportToS3Tests::EnableChecksumsPersistance [GOOD] >> test.py::test[select-unlabeled--Results] [GOOD] >> test.py::test[select-use_cluster-default.txt-Results] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotPriorityByTime [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime >> TIterator::Mixed [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True >> TIterator::MixedReverse ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ChecksumsWithCompression [GOOD] Test command err: Leader for TabletID 
72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:58:15.229583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:58:15.229607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:15.229613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:58:15.229618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:58:15.229632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:58:15.229636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:58:15.229646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:15.229665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:58:15.229738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:58:15.243070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:58:15.243089Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:58:15.246976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:58:15.247037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:58:15.247079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:58:15.248732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:58:15.248783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:58:15.248883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:15.249077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:58:15.250010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:15.250050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:58:15.250240Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:15.250249Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:15.250264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:58:15.250271Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a 
serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:15.250276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:58:15.250300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:58:15.251404Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:58:15.270199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:58:15.270275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:15.270331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:58:15.270388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:58:15.270398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:15.273140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:15.273176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:58:15.273233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:15.273245Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:58:15.273250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:58:15.273254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:58:15.273781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:15.273795Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:58:15.273801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:58:15.274153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:15.274160Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:15.274166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:15.274173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:58:15.274832Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:58:15.275191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:58:15.275226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:58:15.275395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:15.275417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:58:15.275426Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:15.275496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:58:15.275503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:15.275528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:58:15.275540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:58:15.275904Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:15.275911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:15.275951Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:15.275956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:58:15.275966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:15.275972Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:58:15.275982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:15.275986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:15.275991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:15.275994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:15.275998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:58:15.276004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:15.276008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:58:15.276015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:58:15.276024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:58:15.276030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:58:15.276034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:58:15.276416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:58:15.276428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: ... } ExecLevel: 0 TxId: 281474976710759 MinStep: 5000005 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:58:18.223606Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2025-07-08T11:58:18.223639Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710759 at step: 5000005 2025-07-08T11:58:18.223787Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:18.223808Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 17179871342 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:58:18.223815Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000005, at schemeshard: 72057594046678944 2025-07-08T11:58:18.223836Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2025-07-08T11:58:18.223863Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:3120 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 943FA785-B9D2-44B7-A8AC-673C282D6B1E amz-sdk-request: attempt=1 content-length: 73 content-md5: a9Su4FHJt26Hhw4HV0+Ocg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 
Linux/5.15.0-139-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 REQUEST: PUT /metadata.json.sha256 HTTP/1.1 HEADERS: Host: localhost:3120 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 816DBC49-6A2A-43A3-A1BC-DF2F87F5B3C5 amz-sdk-request: attempt=1 content-length: 78 content-md5: 5v+lOCwt7SV92xRPjSiuqQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json.sha256 / / 78 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 2025-07-08T11:58:18.231766Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:18.231785Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-07-08T11:58:18.231873Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:18.231880Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:206:2208], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 4 2025-07-08T11:58:18.232026Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-07-08T11:58:18.232037Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:58:18.233004Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-07-08T11:58:18.233027Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-07-08T11:58:18.233032Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2025-07-08T11:58:18.233039Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-07-08T11:58:18.233047Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-07-08T11:58:18.233069Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 281474976710759 2025-07-08T11:58:18.233865Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:3120 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 41167F75-2389-4FBC-87E1-C33A5B4C9F8A amz-sdk-request: attempt=1 content-length: 355 content-md5: 4DhJNWgTpoG3PVvZ0uCHUA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb / / 355 REQUEST: PUT /scheme.pb.sha256 HTTP/1.1 HEADERS: Host: localhost:3120 Accept: */* 
Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: FBB5D972-9461-473B-B0EF-697AEAACE657 amz-sdk-request: attempt=1 content-length: 74 content-md5: NWNhlq1fHKxcSj+x5Xq9NQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb.sha256 / / 74 REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:3120 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E7E10F75-FCEE-4262-9182-D0B39662BFF8 amz-sdk-request: attempt=1 content-length: 27 content-md5: CTqKvdXJPw0OgRdlsoR71Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 27 REQUEST: PUT /data_00.csv.sha256 HTTP/1.1 HEADERS: Host: localhost:3120 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: EEEAE052-1490-482D-9A36-E728E9EDA52A amz-sdk-request: attempt=1 content-length: 76 content-md5: gmOXObjloPe2DGxtDsgfpg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.sha256 / / 76 2025-07-08T11:58:18.241638Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 445 RawX2: 17179871598 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-07-08T11:58:18.241659Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2025-07-08T11:58:18.241681Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 445 RawX2: 17179871598 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-07-08T11:58:18.241694Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 445 RawX2: 17179871598 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-07-08T11:58:18.241708Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:18.241712Z node 4 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-07-08T11:58:18.241716Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-07-08T11:58:18.241723Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-07-08T11:58:18.241765Z node 4 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a 
serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:18.242235Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-07-08T11:58:18.242311Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-07-08T11:58:18.242320Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-07-08T11:58:18.242330Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-07-08T11:58:18.242334Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-07-08T11:58:18.242338Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-07-08T11:58:18.242341Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-07-08T11:58:18.242345Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2025-07-08T11:58:18.242357Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:125:2151] message: TxId: 281474976710759 2025-07-08T11:58:18.242363Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-07-08T11:58:18.242368Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-07-08T11:58:18.242372Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2025-07-08T11:58:18.242395Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-07-08T11:58:18.242765Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-07-08T11:58:18.242778Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2025-07-08T11:58:18.243108Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-07-08T11:58:18.243118Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:473:2434] TestWaitNotification: OK eventTxId 102 >> TExportToS3Tests::DisableAutoDropping [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:58:18.544612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:58:18.544637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:18.544642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:58:18.544647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:58:18.544653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:58:18.544657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:58:18.544670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:18.544683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:58:18.544743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:58:18.561906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:58:18.561929Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:58:18.566119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:58:18.566170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:58:18.566208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:58:18.567571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:58:18.567613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:58:18.567699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:18.567845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:58:18.568528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:18.568568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:58:18.568771Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:18.568780Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:18.568797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:58:18.568803Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:18.568809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:58:18.568832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:58:18.570020Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:58:18.589959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: 
ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:58:18.590033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:18.590096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:58:18.590135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:58:18.590147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:18.593351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:18.593388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:58:18.593440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:18.593452Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:58:18.593457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:58:18.593463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:58:18.593985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:18.593998Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:58:18.594004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:58:18.594339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:18.594349Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:18.594356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:18.594363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:58:18.595050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:58:18.595446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:58:18.595481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add 
transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:58:18.595661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:18.595687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:58:18.595698Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:18.595771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:58:18.595780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:18.595809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:58:18.595821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:58:18.596227Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:18.596235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:18.596276Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:18.596281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:58:18.596292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:18.596299Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:58:18.596311Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:18.596316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:18.596320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:18.596323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:18.596328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:58:18.596334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:18.596338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:58:18.596343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:58:18.596353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:58:18.596359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, 
publications: 1, subscribers: 0 2025-07-08T11:58:18.596364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:58:18.596799Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:58:18.596814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... # 106:0 ProgressState 2025-07-08T11:58:18.872438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-07-08T11:58:18.872442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-07-08T11:58:18.872447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-07-08T11:58:18.872450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-07-08T11:58:18.872454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2025-07-08T11:58:18.872460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-07-08T11:58:18.872464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-07-08T11:58:18.872469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2025-07-08T11:58:18.872501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-07-08T11:58:18.872723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-07-08T11:58:18.873212Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186234409546 Forgetting tablet 72075186234409546 2025-07-08T11:58:18.909496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-07-08T11:58:18.909593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-07-08T11:58:18.909784Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 7 TxId_Deprecated: 7 TabletID: 72075186234409548 2025-07-08T11:58:18.909909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:58:18.910310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 7 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2025-07-08T11:58:18.910362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-07-08T11:58:18.910597Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186234409547 Forgetting tablet 72075186234409548 2025-07-08T11:58:18.911068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 
72075186233409546 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-07-08T11:58:18.911106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186234409547 2025-07-08T11:58:18.929221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-07-08T11:58:18.929385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T11:58:18.929399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-07-08T11:58:18.929439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-07-08T11:58:18.929653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T11:58:18.929663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-07-08T11:58:18.929677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:58:18.931553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-07-08T11:58:18.931574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186234409546 2025-07-08T11:58:18.931897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2025-07-08T11:58:18.931909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186234409548 2025-07-08T11:58:18.931929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-07-08T11:58:18.931936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186234409547 2025-07-08T11:58:18.932082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-07-08T11:58:18.932106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-07-08T11:58:18.932184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-07-08T11:58:18.932192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-07-08T11:58:18.932277Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-07-08T11:58:18.932303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-07-08T11:58:18.932309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:928:2790] TestWaitNotification: OK eventTxId 106 2025-07-08T11:58:18.932402Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, 
record: Path: "/MyRoot/ServerLess0/dir/table0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:58:18.932444Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0/dir/table0" took 63us result status StatusPathDoesNotExist 2025-07-08T11:58:18.932496Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0/dir/table0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/ServerLess0/dir/table0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-07-08T11:58:18.932560Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:58:18.932575Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 17us result status StatusPathDoesNotExist 2025-07-08T11:58:18.932592Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/ServerLess0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-07-08T11:58:18.932638Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:58:18.932662Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 26us result status StatusSuccess 2025-07-08T11:58:18.932744Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "SharedDB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186233409550 is deleted wait until 72075186233409551 is deleted wait until 72075186233409552 is deleted wait until 72075186233409553 is deleted 2025-07-08T11:58:18.932836Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409550 2025-07-08T11:58:18.932848Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409551 2025-07-08T11:58:18.932859Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409552 2025-07-08T11:58:18.932866Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409553 Deleted tabletId 72075186233409550 Deleted tabletId 72075186233409551 Deleted tabletId 72075186233409552 Deleted tabletId 72075186233409553 >> TKesusTest::TestAttachNewSessions >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True [GOOD] >> TExportToS3Tests::ShouldSucceedOnManyTables [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_EnableLocalDBFlatIndex_False >> TExportToS3Tests::ShouldRestartOnScanErrors [GOOD] >> TExportToS3Tests::ShouldRetryAtFinalStage >> test.py::test[join-anyjoin_common_nodata_keys--Results] [GOOD] >> TKesusTest::TestSessionDetach >> test.py::test[join-bush_dis_in_in--ForceBlocks] >> THDRRQuoterResourceTreeRuntimeTest::TestAllocateResource [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestAllocationGranularity [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestAmountIsLessThanEpsilon [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestActiveSessionDisconnectsAndThenConnectsAgain [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> TKesusTest::TestUnregisterProxy >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_EnableLocalDBFlatIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False_EnableLocalDBFlatIndex_False >> TKesusTest::TestAttachNewSessions [GOOD] >> TKesusTest::TestAttachMissingSession >> THDRRQuoterResourceTreeRuntimeTest::TestHierarchicalQuotas [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestHangDefence [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestMoreStrongChildLimit [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveSessionDisconnectsAndThenConnectsAgain [GOOD] >> 
THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False_EnableLocalDBFlatIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_TurnOff |63.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |63.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost >> TKesusTest::TestUnregisterProxy [GOOD] >> TKesusTest::TestUnregisterProxyBadGeneration >> TIterator::MixedReverse [GOOD] >> TIterator::Serial >> TKesusTest::TestQuoterAccountResourcesBurst >> TKesusTest::TestAttachMissingSession [GOOD] >> TKesusTest::TestAttachOldGeneration |63.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::EnableChecksumsPersistance [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:58:16.565949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:58:16.565970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:16.565976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:58:16.565982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:58:16.565996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:58:16.566000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:58:16.566011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:16.566031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:58:16.566106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:58:16.579874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:58:16.579891Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:58:16.583827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:58:16.583872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:58:16.583895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:58:16.585161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:58:16.585209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState 
with owners number: 0 2025-07-08T11:58:16.585303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:16.585438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:58:16.586238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:16.586281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:58:16.586490Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:16.586499Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:16.586517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:58:16.586523Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:16.586530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:58:16.586555Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:58:16.587760Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:58:16.609070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:58:16.609140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:16.609195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:58:16.609260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:58:16.609273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:16.613470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:16.613510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:58:16.613592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:16.613606Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:58:16.613612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:58:16.613618Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:58:16.621367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:16.621396Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:58:16.621404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:58:16.625314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:16.625340Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:16.625348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:16.625358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:58:16.626003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:58:16.633192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:58:16.633242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:58:16.633443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:16.633471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:58:16.633484Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:16.633577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:58:16.633587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:16.633617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:58:16.633629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:58:16.634174Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:16.634182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-07-08T11:58:16.634227Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:16.634233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:58:16.634243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:16.634250Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:58:16.634260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:16.634264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:16.634269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:16.634272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:16.634276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:58:16.634281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:16.634285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:58:16.634289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:58:16.634300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:58:16.634306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:58:16.634310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:58:16.634775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:58:16.634791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
KE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710759 at step: 5000005 Leader for TabletID 72057594046678944 is [4:561:2516] sender: [4:627:2058] recipient: [4:15:2062] 2025-07-08T11:58:19.310338Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:19.310387Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 17179871342 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:58:19.310401Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000005, at schemeshard: 72057594046678944 2025-07-08T11:58:19.310435Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2025-07-08T11:58:19.310472Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 2025-07-08T11:58:19.311809Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:19.311823Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-07-08T11:58:19.311891Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:19.311898Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:609:2553], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710759 2025-07-08T11:58:19.311990Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-07-08T11:58:19.312000Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:58:19.312139Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-07-08T11:58:19.312152Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-07-08T11:58:19.312156Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2025-07-08T11:58:19.312161Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-07-08T11:58:19.312166Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-07-08T11:58:19.312185Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true 2025-07-08T11:58:19.313012Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:3868 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A423D823-7D7A-485D-88D6-252D1D38669F amz-sdk-request: attempt=1 content-length: 73 content-md5: a9Su4FHJt26Hhw4HV0+Ocg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 REQUEST: PUT /metadata.json.sha256 HTTP/1.1 HEADERS: Host: localhost:3868 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: AD85819D-D700-45EF-979F-E89246AB83C0 amz-sdk-request: attempt=1 content-length: 78 content-md5: 5v+lOCwt7SV92xRPjSiuqQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json.sha256 / / 78 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:3868 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B097B3F7-FB5A-4FA5-A2B8-CB5C7BF2BCA7 amz-sdk-request: attempt=1 content-length: 355 content-md5: 4DhJNWgTpoG3PVvZ0uCHUA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb / / 355 REQUEST: PUT /scheme.pb.sha256 HTTP/1.1 HEADERS: Host: localhost:3868 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1DC31B3F-0B65-4020-857C-D33A18E639F4 amz-sdk-request: attempt=1 content-length: 74 content-md5: NWNhlq1fHKxcSj+x5Xq9NQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb.sha256 / / 74 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:3868 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C1F8B8B9-1318-4FFC-AD51-52C4DFE19A45 amz-sdk-request: attempt=1 content-length: 30 content-md5: wztA6/fCcYCMKR0jw2GMNw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / / 30 TestWaitNotification wait txId: 102 2025-07-08T11:58:19.316347Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-07-08T11:58:19.316360Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-07-08T11:58:19.316448Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion export in-flight, txId: 102, at schemeshard: 72057594046678944 2025-07-08T11:58:19.316454Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 102, at schemeshard: 72057594046678944 REQUEST: PUT /data_00.csv.sha256 HTTP/1.1 HEADERS: Host: localhost:3868 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C370997F-BDDF-40E5-9FFA-DD5AD89EF337 amz-sdk-request: attempt=1 content-length: 76 content-md5: gmOXObjloPe2DGxtDsgfpg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: 
/data_00.csv.sha256 / / 76 2025-07-08T11:58:19.321749Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 445 RawX2: 17179871598 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-07-08T11:58:19.321771Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2025-07-08T11:58:19.321796Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 445 RawX2: 17179871598 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-07-08T11:58:19.321810Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 445 RawX2: 17179871598 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-07-08T11:58:19.321823Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:19.321828Z node 4 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-07-08T11:58:19.321832Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-07-08T11:58:19.321840Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-07-08T11:58:19.321881Z node 4 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:19.328267Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-07-08T11:58:19.328403Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-07-08T11:58:19.328417Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-07-08T11:58:19.328442Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-07-08T11:58:19.328450Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-07-08T11:58:19.328456Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-07-08T11:58:19.328459Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-07-08T11:58:19.328464Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is 
published: true 2025-07-08T11:58:19.328488Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:561:2516] message: TxId: 281474976710759 2025-07-08T11:58:19.328496Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-07-08T11:58:19.328501Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-07-08T11:58:19.328506Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2025-07-08T11:58:19.328535Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-07-08T11:58:19.329414Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-07-08T11:58:19.329449Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2025-07-08T11:58:19.333249Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-07-08T11:58:19.333274Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:650:2589] TestWaitNotification: OK eventTxId 102 >> TExportToS3Tests::CancelledExportEndTime [GOOD] >> TExportToS3Tests::Changefeeds >> TKesusTest::TestReleaseLockFailure >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_TurnOff [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_Generations >> TKesusTest::TestSessionDetach [GOOD] >> TKesusTest::TestSessionDetachFutureId |63.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication >> TKesusTest::TestUnregisterProxyBadGeneration [GOOD] |63.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |63.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] |63.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication >> TKesusTest::TestSessionTimeoutAfterUnregister >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-modify-analytics] [GOOD] >> TKesusTest::TestReleaseLockFailure [GOOD] >> TKesusTest::TestReleaseSemaphore >> TKesusTest::TestSessionDetachFutureId [GOOD] >> TKesusTest::TestSessionDestroy >> TIterator::Serial [GOOD] >> TIterator::SerialReverse |63.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] |63.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 [GOOD] >> TKesusTest::TestAttachOldGeneration [GOOD] >> TKesusTest::TestAttachFastPath >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_Generations [GOOD] >> TFlatTableExecutor_CachePressure::TestNotEnoughLocalCache [GOOD] >> TFlatTableExecutor_Cold::ColdBorrowScan [GOOD] >> TFlatTableExecutor_ColumnGroups::TestManyRows |63.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} 
ydb/library/yql/tests/sql/dq_file/part15/pytest >> test.py::test[blocks-top_sort_two_desc--Results] [GOOD] >> TKesusTest::TestKesusConfig ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::DisableAutoDropping [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:58:16.515673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:58:16.515699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:16.515704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:58:16.515710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:58:16.515725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:58:16.515730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:58:16.515739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:16.515760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:58:16.515834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:58:16.528768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:58:16.528786Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:58:16.532314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:58:16.532362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:58:16.532393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:58:16.533620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:58:16.533664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:58:16.533754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:16.533891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:58:16.534586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:16.534640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:58:16.534853Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:16.534861Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:16.534879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:58:16.534886Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:16.534892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:58:16.534917Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:58:16.535977Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:58:16.554873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:58:16.554949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:16.555012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:58:16.555066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:58:16.555077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:16.555729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:16.555753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:58:16.555808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:16.555817Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:58:16.555823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:58:16.555827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:58:16.556127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:16.556136Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:58:16.556141Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:58:16.556388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:16.556395Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 
72057594046678944 2025-07-08T11:58:16.556401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:16.556407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:58:16.557025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:58:16.557349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:58:16.557387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:58:16.557566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:16.557586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:58:16.557593Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:16.557672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:58:16.557679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:16.557707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:58:16.557719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:58:16.558052Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:16.558059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:16.558101Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:16.558106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:58:16.558116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:16.558121Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:58:16.558132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:16.558136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:16.558147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:16.558150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:16.558154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:58:16.558160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:16.558165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:58:16.558169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:58:16.558178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:58:16.558184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:58:16.558188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:58:16.558571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:58:16.558584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 57594046678944 2025-07-08T11:58:19.660576Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-07-08T11:58:19.660581Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710761 ready parts: 1/1 2025-07-08T11:58:19.660602Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710761 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:58:19.660657Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-07-08T11:58:19.660664Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-07-08T11:58:19.660666Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-07-08T11:58:19.660669Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-07-08T11:58:19.660672Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T11:58:19.660738Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 
2025-07-08T11:58:19.660744Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-07-08T11:58:19.660746Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-07-08T11:58:19.660748Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2025-07-08T11:58:19.660750Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-07-08T11:58:19.660755Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-07-08T11:58:19.661341Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-07-08T11:58:19.661391Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-07-08T11:58:19.661397Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-07-08T11:58:19.661403Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2025-07-08T11:58:19.661422Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-07-08T11:58:19.661442Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000007 2025-07-08T11:58:19.661533Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:19.661552Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 17179871341 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:58:19.661560Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000007, at schemeshard: 72057594046678944 2025-07-08T11:58:19.661580Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-07-08T11:58:19.661589Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-07-08T11:58:19.661592Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-07-08T11:58:19.661597Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-07-08T11:58:19.661600Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-07-08T11:58:19.661607Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 2 2025-07-08T11:58:19.661614Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T11:58:19.661619Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-07-08T11:58:19.661625Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-07-08T11:58:19.661629Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-07-08T11:58:19.661632Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-07-08T11:58:19.661639Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-07-08T11:58:19.661644Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-07-08T11:58:19.661648Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-07-08T11:58:19.661651Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-07-08T11:58:19.661803Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-07-08T11:58:19.662011Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-07-08T11:58:19.662107Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:19.662112Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:19.662131Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T11:58:19.662147Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:19.662150Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:205:2207], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-07-08T11:58:19.662153Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:205:2207], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 3 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-07-08T11:58:19.662249Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-07-08T11:58:19.662258Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-07-08T11:58:19.662261Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-07-08T11:58:19.662264Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 
72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-07-08T11:58:19.662266Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T11:58:19.662338Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-07-08T11:58:19.662344Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-07-08T11:58:19.662347Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-07-08T11:58:19.662350Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-07-08T11:58:19.662352Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T11:58:19.662357Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-07-08T11:58:19.662360Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:122:2148] 2025-07-08T11:58:19.662778Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-07-08T11:58:19.662813Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-07-08T11:58:19.662821Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-07-08T11:58:19.662829Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-07-08T11:58:19.662833Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-07-08T11:58:19.662836Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-07-08T11:58:19.662839Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 102, itemIdx# 4294967295 2025-07-08T11:58:19.663069Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-07-08T11:58:19.663084Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-07-08T11:58:19.663090Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:608:2565] TestWaitNotification: OK eventTxId 102 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldSucceedOnManyTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:58:17.484711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue 
configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:58:17.484732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:17.484735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:58:17.484739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:58:17.484749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:58:17.484751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:58:17.484759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:17.484770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:58:17.484825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:58:17.493628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:58:17.493653Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:58:17.497089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:58:17.497140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:58:17.497168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:58:17.498655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:58:17.498710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:58:17.498815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:17.498976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:58:17.499710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:17.499753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:58:17.499990Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:17.499999Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:17.500016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:58:17.500023Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:17.500028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:58:17.500054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
2025-07-08T11:58:17.501309Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:58:17.520030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:58:17.520119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.520182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:58:17.520236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:58:17.520247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.521252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:17.521279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:58:17.521338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.521348Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:58:17.521353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:58:17.521358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:58:17.521739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.521747Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:58:17.521751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:58:17.522015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.522022Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.522027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:17.522034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:58:17.522627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:58:17.522970Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:58:17.523006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:58:17.523180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:17.523200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:58:17.523208Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:17.523270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:58:17.523276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:17.523301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:58:17.523313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:58:17.523656Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:17.523662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:17.523699Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:17.523704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:58:17.523713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.523719Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:58:17.523729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:17.523733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:17.523738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:17.523741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:17.523745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:58:17.523750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:17.523754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:58:17.523758Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:58:17.523768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:58:17.523773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:58:17.523777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:58:17.524134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:58:17.524145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 94046678944 2025-07-08T11:58:19.655402Z node 3 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-07-08T11:58:19.655410Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710763 ready parts: 1/1 2025-07-08T11:58:19.655435Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710763 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:58:19.655602Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-07-08T11:58:19.655615Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-07-08T11:58:19.655620Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-07-08T11:58:19.655625Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2025-07-08T11:58:19.655630Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-07-08T11:58:19.655985Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-07-08T11:58:19.656013Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-07-08T11:58:19.656017Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-07-08T11:58:19.656022Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 11 2025-07-08T11:58:19.656027Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-07-08T11:58:19.656041Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-07-08T11:58:19.656605Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-07-08T11:58:19.656641Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2025-07-08T11:58:19.656647Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-07-08T11:58:19.656653Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710763, at schemeshard: 72057594046678944 2025-07-08T11:58:19.656820Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2025-07-08T11:58:19.656849Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000010 2025-07-08T11:58:19.657129Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-07-08T11:58:19.657171Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000010, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:19.657191Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 12884904037 } } Step: 5000010 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:58:19.657201Z node 3 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000010, at schemeshard: 72057594046678944 2025-07-08T11:58:19.657228Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-07-08T11:58:19.657238Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-07-08T11:58:19.657243Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-07-08T11:58:19.657249Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-07-08T11:58:19.657252Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-07-08T11:58:19.657260Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T11:58:19.657269Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-07-08T11:58:19.657275Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2025-07-08T11:58:19.657282Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-07-08T11:58:19.657286Z 
node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710763:0 2025-07-08T11:58:19.657291Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710763:0 2025-07-08T11:58:19.657299Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-07-08T11:58:19.657305Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2025-07-08T11:58:19.657309Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2025-07-08T11:58:19.657313Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-07-08T11:58:19.657419Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-07-08T11:58:19.657758Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:19.657768Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:19.657799Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-07-08T11:58:19.657821Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:19.657826Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:205:2207], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2025-07-08T11:58:19.657831Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:205:2207], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710763 2025-07-08T11:58:19.657987Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-07-08T11:58:19.658001Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-07-08T11:58:19.658006Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2025-07-08T11:58:19.658011Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-07-08T11:58:19.658015Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-07-08T11:58:19.658089Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-07-08T11:58:19.658099Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-07-08T11:58:19.658102Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2025-07-08T11:58:19.658106Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-07-08T11:58:19.658110Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-07-08T11:58:19.658118Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2025-07-08T11:58:19.658123Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [3:130:2155] 2025-07-08T11:58:19.658796Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-07-08T11:58:19.658858Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-07-08T11:58:19.658872Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-07-08T11:58:19.658882Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2025-07-08T11:58:19.658888Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-07-08T11:58:19.658892Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-07-08T11:58:19.658897Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 103, itemIdx# 4294967295 2025-07-08T11:58:19.659219Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-07-08T11:58:19.659240Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-07-08T11:58:19.659245Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:840:2771] TestWaitNotification: OK eventTxId 103 >> TKesusTest::TestAttachFastPath [GOOD] >> TKesusTest::TestAttachFastPathBlocked >> TIterator::SerialReverse [GOOD] >> TIterator::GetKey [GOOD] >> TIterator::GetKeyWithEraseCache [GOOD] >> TKesusTest::TestAcquireWaiterDowngrade >> TKesusTest::TestSessionDestroy [GOOD] >> TKesusTest::TestSessionStealing >> TIterator::GetKeyWithVersionSkips [GOOD] >> TLegacy::IndexIter [GOOD] >> TLegacy::ScreenedIndexIter [GOOD] >> TLegacy::StatsIter [GOOD] >> TPageHandleTest::Uninitialized [GOOD] >> TPageHandleTest::NormalUse [GOOD] >> TPageHandleTest::HandleRef [GOOD] >> TPageHandleTest::PinnedRef [GOOD] >> TPageHandleTest::PinnedRefPure [GOOD] >> TPart::State [GOOD] >> TPart::Trivials [GOOD] >> TPart::Basics [GOOD] >> TPart::BasicColumnGroups [GOOD] >> TPart::CellDefaults [GOOD] >> TPart::Matter [GOOD] >> TPart::External [GOOD] >> TPart::Outer [GOOD] >> TPart::MassCheck >> TKesusTest::TestKesusConfig [GOOD] >> TKesusTest::TestLockNotFound >> TKesusTest::TestReleaseSemaphore [GOOD] >> TKesusTest::TestSemaphoreData >> TKesusTest::TestAttachFastPathBlocked [GOOD] >> TPart::MassCheck [GOOD] >> TPart::WreckPart >> TExportToS3Tests::Changefeeds [GOOD] >> TKesusTest::TestLockNotFound [GOOD] >> 
TKesusTest::TestDeleteSemaphore >> TKesusTest::TestSessionStealing [GOOD] >> TKesusTest::TestSessionStealingAnyKey >> TKesusTest::TestAcquireWaiterDowngrade [GOOD] >> TKesusTest::TestAcquireWaiterUpgrade >> TKesusTest::TestDeleteSemaphore [GOOD] >> TKesusTest::TestDescribeSemaphoreWatches >> TKesusTest::TestSessionStealingAnyKey [GOOD] >> TPart::WreckPart [GOOD] >> TPart::PageFailEnv >> TKesusTest::TestSemaphoreData [GOOD] >> TKesusTest::TestSemaphoreReleaseReacquire >> TFlatTableExecutor_ColumnGroups::TestManyRows [GOOD] >> TFlatTableExecutor_CompactionScan::TestCompactionScan [GOOD] >> TFlatTableExecutor_CompressedSelectRows::TestCompressedSelectRows [GOOD] >> TFlatTableExecutor_ExecutorTxLimit::TestExecutorTxLimit [GOOD] >> TFlatTableExecutor_Follower::BasicFollowerRead [GOOD] >> TFlatTableExecutor_Follower::FollowerEarlyRebootHoles >> TKesusTest::TestAcquireWaiterUpgrade [GOOD] >> TKesusTest::TestAcquireWaiterChangeTimeoutToZero >> TFlatTableExecutor_Follower::FollowerEarlyRebootHoles [GOOD] >> TFlatTableExecutor_Follower::FollowerAttachOnTxQueueScanSnapshot >> TPart::PageFailEnv [GOOD] >> TPart::ForwardEnv [GOOD] >> TPart::WreckPartColumnGroups ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAttachFastPathBlocked [GOOD] Test command err: 2025-07-08T11:58:19.993573Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:19.993612Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:19.997370Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:19.997411Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:20.013533Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:20.013718Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:131:2157], cookie=3347319489544251054, session=0, seqNo=0) 2025-07-08T11:58:20.013767Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-07-08T11:58:20.037370Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:131:2157], cookie=3347319489544251054, session=1) 2025-07-08T11:58:20.037512Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:131:2157], cookie=17845562286571497911, session=0, seqNo=0) 2025-07-08T11:58:20.037548Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-07-08T11:58:20.053427Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:131:2157], cookie=17845562286571497911, session=2) 2025-07-08T11:58:20.279349Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:20.279387Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:20.283348Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:20.283392Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:20.306511Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:20.306672Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:133:2159], cookie=16061296019366760643, session=1, seqNo=0) 2025-07-08T11:58:20.321350Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:133:2159], cookie=16061296019366760643, session=1) 2025-07-08T11:58:20.622159Z node 3 :KESUS_TABLET INFO: 
OnActivateExecutor: 72057594037927937 2025-07-08T11:58:20.622193Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:20.625940Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:20.625977Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:20.639986Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:20.640202Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:133:2159], cookie=8448678414560805469, session=0, seqNo=0) 2025-07-08T11:58:20.640243Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-07-08T11:58:20.672424Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:133:2159], cookie=8448678414560805469, session=1) 2025-07-08T11:58:20.981654Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:20.981692Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:20.985529Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:20.985591Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:21.017699Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:21.017821Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[4:133:2159], cookie=11610921086775374064, path="") 2025-07-08T11:58:21.033503Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[4:133:2159], cookie=11610921086775374064, status=SUCCESS) 2025-07-08T11:58:21.033737Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:142:2166], cookie=16173167669569305288, session=0, seqNo=0) 2025-07-08T11:58:21.033775Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-07-08T11:58:21.049376Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:142:2166], cookie=16173167669569305288, session=1) 2025-07-08T11:58:21.049573Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:143:2167], cookie=111, session=0, seqNo=0) 2025-07-08T11:58:21.049613Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-07-08T11:58:21.049661Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path attach session=1 to sender=[4:143:2167], cookie=222, seqNo=0 2025-07-08T11:58:21.065978Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:143:2167], cookie=111, session=2) 2025-07-08T11:58:21.253824Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:21.253863Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:21.257826Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:21.257894Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:21.285555Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:21.285663Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[5:133:2159], cookie=3812645212615028357, path="") 2025-07-08T11:58:21.302580Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[5:133:2159], cookie=3812645212615028357, status=SUCCESS) 2025-07-08T11:58:21.302816Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute 
(sender=[5:142:2166], cookie=3740269268986609045, session=0, seqNo=0) 2025-07-08T11:58:21.302854Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-07-08T11:58:21.315561Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:142:2166], cookie=3740269268986609045, session=1) 2025-07-08T11:58:21.315788Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:142:2166], cookie=123, session=1, semaphore="Lock1" count=18446744073709551615) 2025-07-08T11:58:21.315899Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-07-08T11:58:21.315917Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-07-08T11:58:21.315990Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:143:2167], cookie=111, session=0, seqNo=0) 2025-07-08T11:58:21.316004Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-07-08T11:58:21.316021Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:143:2167], cookie=222, session=1, seqNo=0) 2025-07-08T11:58:21.329447Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:142:2166], cookie=123) 2025-07-08T11:58:21.329481Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:143:2167], cookie=111, session=2) 2025-07-08T11:58:21.329490Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:143:2167], cookie=222, session=1) >> TFlatTableExecutor_Follower::FollowerAttachOnTxQueueScanSnapshot [GOOD] >> TFlatTableExecutor_Follower::FollowerAttachAfterLoan [GOOD] >> TFlatTableExecutor_Gc::TestFailedGcAfterReboot [GOOD] >> TFlatTableExecutor_IndexLoading::CalculateReadSize_FlatIndex >> TKesusTest::TestQuoterResourceDescribe >> TKesusTest::TestAcquireWaiterChangeTimeoutToZero [GOOD] >> TKesusTest::TestAcquireWaiterRelease >> test.py::test[window-win_func_part_by_expr--ForceBlocks] [GOOD] >> TPart::WreckPartColumnGroups [GOOD] >> TKesusTest::TestQuoterAccountResourcesBurst [GOOD] >> test.py::test[window-win_func_part_by_expr--Results] >> TKesusTest::TestQuoterAccountResourcesAggregateClients >> TPart::PageFailEnvColumnGroups >> TKesusTest::TestQuoterHDRRParametersValidation >> TFlatTableExecutor_IndexLoading::CalculateReadSize_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::CalculateReadSize_BTreeIndex >> TxUsage::TwoSessionOneConsumer [GOOD] >> TKesusTest::TestAcquireWaiterRelease [GOOD] >> TKesusTest::TestAllocatesResources >> TKesusTest::TestSemaphoreReleaseReacquire [GOOD] >> TKesusTest::TestSemaphoreSessionFailures >> test.py::test[column_order-select_where-default.txt-ForceBlocks] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSessionStealingAnyKey [GOOD] >> test.py::test[column_order-select_where-default.txt-Results] Test command err: 2025-07-08T11:58:20.266762Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:20.266809Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:20.288461Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:20.288507Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:20.317621Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:20.317799Z node 1 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:131:2157], cookie=8047026960074547327, session=0, seqNo=0) 2025-07-08T11:58:20.317843Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-07-08T11:58:20.341828Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:131:2157], cookie=8047026960074547327, session=1) 2025-07-08T11:58:20.342155Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Execute (sender=[1:131:2157], cookie=3025601410407445404, session=2) 2025-07-08T11:58:20.342185Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Complete (sender=[1:131:2157], cookie=3025601410407445404) 2025-07-08T11:58:20.342259Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=1 from sender=[1:131:2157], cookie=4467672796241248196 2025-07-08T11:58:20.342334Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:131:2157], cookie=11249898606652629548, session=1, seqNo=0) 2025-07-08T11:58:20.357773Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:131:2157], cookie=11249898606652629548, session=1) 2025-07-08T11:58:20.357884Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:131:2157], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-07-08T11:58:20.357958Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-07-08T11:58:20.357974Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-07-08T11:58:20.358025Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Execute (sender=[1:131:2157], cookie=12044306449317681509, session=1) 2025-07-08T11:58:20.371482Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-07-08T11:58:20.371534Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-07-08T11:58:20.371543Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2025-07-08T11:58:20.385882Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:131:2157], cookie=111) 2025-07-08T11:58:20.385917Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Complete (sender=[1:131:2157], cookie=12044306449317681509) 2025-07-08T11:58:20.385929Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-07-08T11:58:20.723057Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:20.723098Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:20.732035Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:20.732097Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:20.757768Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:20.757879Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[2:133:2159], cookie=1334016389356897714, path="") 2025-07-08T11:58:20.777452Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[2:133:2159], cookie=1334016389356897714, status=SUCCESS) 2025-07-08T11:58:20.777631Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:142:2166], cookie=111, session=0, seqNo=0) 
2025-07-08T11:58:20.777664Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-07-08T11:58:20.777703Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Execute (sender=[2:142:2166], cookie=4409406081192987695, session=1) 2025-07-08T11:58:20.789075Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-07-08T11:58:20.789104Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-07-08T11:58:20.805506Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:142:2166], cookie=111, session=1) 2025-07-08T11:58:20.805546Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Complete (sender=[2:142:2166], cookie=4409406081192987695) 2025-07-08T11:58:20.805559Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-07-08T11:58:21.004677Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:21.004713Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:21.013995Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:21.014162Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:21.043370Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:21.043545Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:133:2159], cookie=7286624864036498211, session=0, seqNo=0) 2025-07-08T11:58:21.043601Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-07-08T11:58:21.057726Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:133:2159], cookie=7286624864036498211, session=1) 2025-07-08T11:58:21.057924Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Execute (sender=[3:133:2159], cookie=18118283305347767707, session=1) 2025-07-08T11:58:21.057952Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-07-08T11:58:21.069424Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Complete (sender=[3:133:2159], cookie=18118283305347767707) 2025-07-08T11:58:21.069627Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:150:2174], cookie=13637762318428751153) 2025-07-08T11:58:21.069644Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:150:2174], cookie=13637762318428751153) 2025-07-08T11:58:21.069736Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:153:2177], cookie=17317015472021868921, session=0, seqNo=0) 2025-07-08T11:58:21.069773Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-07-08T11:58:21.093648Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:153:2177], cookie=17317015472021868921, session=2) 2025-07-08T11:58:21.093898Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Execute (sender=[3:133:2159], cookie=12174778959936768523, session=2) 2025-07-08T11:58:21.093929Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 2025-07-08T11:58:21.109324Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Complete (sender=[3:133:2159], cookie=12174778959936768523) 2025-07-08T11:58:21.376067Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:21.376099Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxInitSchema::Execute 2025-07-08T11:58:21.393447Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:21.393509Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:21.440882Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:21.441113Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:133:2159], cookie=12345, session=0, seqNo=0) 2025-07-08T11:58:21.441158Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-07-08T11:58:21.454218Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:133:2159], cookie=12345, session=1) 2025-07-08T11:58:21.454389Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:140:2164], cookie=23456, session=1, seqNo=0) 2025-07-08T11:58:21.465600Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:140:2164], cookie=23456, session=1) 2025-07-08T11:58:21.731332Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:21.731364Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:21.753215Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:21.753310Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:21.785604Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:21.785797Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:133:2159], cookie=12345, session=0, seqNo=0) 2025-07-08T11:58:21.785843Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-07-08T11:58:21.797628Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:133:2159], cookie=12345, session=1) 2025-07-08T11:58:21.797789Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:140:2164], cookie=23456, session=1, seqNo=0) 2025-07-08T11:58:21.809591Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:140:2164], cookie=23456, session=1) >> TKesusTest::TestQuoterHDRRParametersValidation [GOOD] >> TKesusTest::TestQuoterAccountResourcesOnDemand >> TFlatTableExecutor_IndexLoading::CalculateReadSize_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_FlatIndex >> TKesusTest::TestSessionTimeoutAfterDetach >> TKesusTest::TestAcquireLocks >> TPart::PageFailEnvColumnGroups [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeout >> TPart::ForwardEnvColumnGroups >> TKesusTest::TestAttachOutOfSequence >> TPart::ForwardEnvColumnGroups [GOOD] >> TPart::Versions [GOOD] >> TxUsage::WriteToTopic_Demo_1 >> TPart::ManyVersions ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::Changefeeds [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:58:17.862330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 
2025-07-08T11:58:17.862351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:17.862356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:58:17.862361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:58:17.862375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:58:17.862378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:58:17.862387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:17.862404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:58:17.862470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:58:17.873151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:58:17.873166Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:58:17.876613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:58:17.876658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:58:17.876679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:58:17.878044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:58:17.878089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:58:17.878178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:17.878343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:58:17.879094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:17.879135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:58:17.879344Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:17.879354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:17.879369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:58:17.879375Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:17.879380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:58:17.879405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.880490Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:58:17.897070Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:58:17.897127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.897174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:58:17.897222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:58:17.897230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.898614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:17.898650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:58:17.898706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.898715Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:58:17.898719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:58:17.898723Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:58:17.899416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.899428Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:58:17.899433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:58:17.899791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.899801Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.899806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:17.899812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:58:17.900376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:58:17.905227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:58:17.906185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:58:17.906400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:17.906438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:58:17.906451Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:17.906535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:58:17.906543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:17.906578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:58:17.906592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:58:17.907600Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:17.907619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:17.907685Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:17.907692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:58:17.907705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.907715Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:58:17.907731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:17.907735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:17.907741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:17.907745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:17.907750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:58:17.907767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:17.907772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:58:17.907777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:58:17.907798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] 
was 2 2025-07-08T11:58:17.907804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:58:17.907809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:58:17.908388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:58:17.908403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 7594046678944 2025-07-08T11:58:21.425272Z node 3 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-07-08T11:58:21.425284Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710761 ready parts: 1/1 2025-07-08T11:58:21.425316Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710761 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:58:21.425413Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-07-08T11:58:21.425425Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-07-08T11:58:21.425429Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-07-08T11:58:21.425434Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-07-08T11:58:21.425441Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T11:58:21.425635Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-07-08T11:58:21.425645Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-07-08T11:58:21.425649Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-07-08T11:58:21.425653Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 7 2025-07-08T11:58:21.425657Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3 2025-07-08T11:58:21.425666Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is 
published: true 2025-07-08T11:58:21.429796Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-07-08T11:58:21.430017Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-07-08T11:58:21.430028Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-07-08T11:58:21.430034Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2025-07-08T11:58:21.430070Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-07-08T11:58:21.430101Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000010 2025-07-08T11:58:21.430190Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000010, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:21.430221Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 12884904037 } } Step: 5000010 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:58:21.430232Z node 3 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000010, at schemeshard: 72057594046678944 2025-07-08T11:58:21.430272Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-07-08T11:58:21.430283Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-07-08T11:58:21.430287Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-07-08T11:58:21.430292Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-07-08T11:58:21.430295Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-07-08T11:58:21.430309Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:58:21.430321Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2 2025-07-08T11:58:21.430326Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-07-08T11:58:21.430335Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-07-08T11:58:21.430339Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-07-08T11:58:21.430343Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-07-08T11:58:21.430356Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3 
2025-07-08T11:58:21.430361Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-07-08T11:58:21.430365Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 12 2025-07-08T11:58:21.430368Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 9], 18446744073709551615 2025-07-08T11:58:21.430540Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-07-08T11:58:21.430558Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-07-08T11:58:21.430902Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:21.430911Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:21.430960Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 9] 2025-07-08T11:58:21.430982Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:21.430987Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:205:2207], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-07-08T11:58:21.430991Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:205:2207], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 9 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-07-08T11:58:21.431164Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-07-08T11:58:21.431178Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-07-08T11:58:21.431183Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-07-08T11:58:21.431188Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2025-07-08T11:58:21.431192Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T11:58:21.431259Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-07-08T11:58:21.431267Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-07-08T11:58:21.431270Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication 
in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-07-08T11:58:21.431273Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 18446744073709551615 2025-07-08T11:58:21.431276Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2 2025-07-08T11:58:21.431284Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2025-07-08T11:58:21.431289Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [3:130:2155] 2025-07-08T11:58:21.431848Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-07-08T11:58:21.431946Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-07-08T11:58:21.431964Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-07-08T11:58:21.431974Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-07-08T11:58:21.431981Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-07-08T11:58:21.431986Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-07-08T11:58:21.431991Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 105, itemIdx# 4294967295 2025-07-08T11:58:21.432277Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-07-08T11:58:21.432295Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-07-08T11:58:21.432301Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:1382:3172] TestWaitNotification: OK eventTxId 105 >> TPart::ManyVersions [GOOD] >> TPart::ManyDeltas [GOOD] >> TPart::CutKeys_Lz4 [GOOD] >> TPart::CutKeys_Seek [GOOD] >> TPart::CutKeys_SeekPages [GOOD] >> TPart::CutKeys_SeekSlices [GOOD] >> TPart::CutKeys_CutString >> TKesusTest::TestAllocatesResources [GOOD] >> TKesusTest::TestSemaphoreSessionFailures [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror [GOOD] >> TPart::CutKeys_CutString [GOOD] >> TPart::CutKeys_CutUtf8String [GOOD] >> TPartBtreeIndexIteration::FewNodes >> TKesusTest::TestAttachOutOfSequence [GOOD] >> TKesusTest::TestAttachOutOfSequenceInTx >> TKesusTest::TestQuoterResourceDescribe [GOOD] >> TKesusTest::TestQuoterResourceCreation >> THDRRQuoterResourceTreeRuntimeTest::TestCreateInactiveSession [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceSessions [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDistributeResourcesBetweenConsumers [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestEffectiveProps [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD] >> TKesusTest::TestAttachOutOfSequenceInTx [GOOD] >> TKesusTest::TestAttachThenReRegister >> test.py::test[weak_field-weak_field_strict--Results] [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestWeights [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestWeightsChange [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestVerySmallSpeed [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaRelease ------- [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAllocatesResources [GOOD] Test command err: 2025-07-08T11:58:21.506960Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:21.506993Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:21.510860Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:21.510892Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:21.524011Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:21.524160Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:133:2159], cookie=17009044433225582129, session=0, seqNo=0) 2025-07-08T11:58:21.524198Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-07-08T11:58:21.547564Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:133:2159], cookie=17009044433225582129, session=1) 2025-07-08T11:58:21.547669Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:133:2159], cookie=2418880884305417344, session=0, seqNo=0) 2025-07-08T11:58:21.547703Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-07-08T11:58:21.560090Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:133:2159], cookie=2418880884305417344, session=2) 2025-07-08T11:58:21.560171Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:133:2159], cookie=111, session=1, semaphore="Lock1" count=1) 2025-07-08T11:58:21.560214Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-07-08T11:58:21.560252Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-07-08T11:58:21.572839Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:133:2159], cookie=111) 2025-07-08T11:58:21.572907Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:133:2159], cookie=222, session=2, semaphore="Lock1" count=18446744073709551615) 2025-07-08T11:58:21.572981Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:133:2159], cookie=333, session=2, semaphore="Lock1" count=1) 2025-07-08T11:58:21.572997Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #2 session 2 2025-07-08T11:58:21.585666Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:133:2159], cookie=222) 2025-07-08T11:58:21.585696Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:133:2159], cookie=333) 2025-07-08T11:58:21.585807Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:149:2173], cookie=1962770161716809083, name="Lock1") 2025-07-08T11:58:21.585836Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:149:2173], cookie=1962770161716809083) 2025-07-08T11:58:21.990422Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:21.990460Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:21.994746Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:21.994795Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxInit::Execute 2025-07-08T11:58:22.021849Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:22.022006Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:133:2159], cookie=4662611597145681730, session=0, seqNo=0) 2025-07-08T11:58:22.022047Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-07-08T11:58:22.045303Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:133:2159], cookie=4662611597145681730, session=1) 2025-07-08T11:58:22.045426Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:133:2159], cookie=8325914555518870209, session=0, seqNo=0) 2025-07-08T11:58:22.045472Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-07-08T11:58:22.061304Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:133:2159], cookie=8325914555518870209, session=2) 2025-07-08T11:58:22.061430Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2159], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-07-08T11:58:22.061476Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-07-08T11:58:22.061493Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-07-08T11:58:22.073894Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2159], cookie=111) 2025-07-08T11:58:22.074031Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2159], cookie=222, session=2, semaphore="Lock1" count=1) 2025-07-08T11:58:22.074149Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2159], cookie=333, session=2, semaphore="Lock1" count=18446744073709551615) 2025-07-08T11:58:22.085948Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2159], cookie=222) 2025-07-08T11:58:22.085985Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2159], cookie=333) 2025-07-08T11:58:22.086128Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:149:2173], cookie=16524216741046779279, name="Lock1") 2025-07-08T11:58:22.086150Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:149:2173], cookie=16524216741046779279) 2025-07-08T11:58:22.086201Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:152:2176], cookie=4333301238505056245, name="Lock1") 2025-07-08T11:58:22.086221Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:152:2176], cookie=4333301238505056245) 2025-07-08T11:58:22.356399Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:22.356435Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:22.360106Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:22.360145Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:22.373634Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:22.373799Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:133:2159], cookie=7911261821365591461, session=0, seqNo=0) 
2025-07-08T11:58:22.373842Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-07-08T11:58:22.399096Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:133:2159], cookie=7911261821365591461, session=1) 2025-07-08T11:58:22.399221Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:133:2159], cookie=4078500190535001712, session=0, seqNo=0) 2025-07-08T11:58:22.399265Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-07-08T11:58:22.410099Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:133:2159], cookie=4078500190535001712, session=2) 2025-07-08T11:58:22.410280Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:133:2159], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-07-08T11:58:22.410324Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-07-08T11:58:22.410340Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-07-08T11:58:22.421529Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:133:2159], cookie=111) 2025-07-08T11:58:22.421631Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:133:2159], cookie=222, session=2, semaphore="Lock1" count=1) 2025-07-08T11:58:22.421721Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:133:2159], cookie=333, session=2, semaphore="Lock1" count=1) 2025-07-08T11:58:22.421732Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2025-07-08T11:58:22.434100Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:133:2159], cookie=222) 2025-07-08T11:58:22.434135Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:133:2159], cookie=333) 2025-07-08T11:58:22.434290Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:152:2176], cookie=3592006678923702764, name="Lock1") 2025-07-08T11:58:22.434314Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:152:2176], cookie=3592006678923702764) 2025-07-08T11:58:22.434370Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:155:2179], cookie=15257527677675744644, name="Lock1") 2025-07-08T11:58:22.434376Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:155:2179], cookie=15257527677675744644) 2025-07-08T11:58:22.437873Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:22.437902Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:22.437959Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:22.438107Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:22.493303Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:22.493362Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-07-08T11:58:22.493482Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:195:2209], cookie=9468158573760490667, name="Lock1") 2025-07-08T11:58:22.493507Z node 3 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:195:2209], cookie=9468158573760490667) 2025-07-08T11:58:22.493611Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:203:2216], cookie=13954708478717766764, name="Lock1") 2025-07-08T11:58:22.493618Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:203:2216], cookie=13954708478717766764) 2025-07-08T11:58:22.735730Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:22.735768Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:22.740274Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:22.740337Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:22.777291Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:22.777462Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:133:2159], cookie=10758344671550538536, session=0, seqNo=0) 2025-07-08T11:58:22.777511Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-07-08T11:58:22.789257Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:133:2159], cookie=10758344671550538536, session=1) 2025-07-08T11:58:22.789366Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:133:2159], cookie=9441455808130430016, session=0, seqNo=0) 2025-07-08T11:58:22.789404Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-07-08T11:58:22.801823Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:133:2159], cookie=9441455808130430016, session=2) 2025-07-08T11:58:22.801942Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:133:2159], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-07-08T11:58:22.801986Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-07-08T11:58:22.802006Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-07-08T11:58:22.813153Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:133:2159], cookie=111) 2025-07-08T11:58:22.813252Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:133:2159], cookie=222, session=2, semaphore="Lock1" count=1) 2025-07-08T11:58:22.813346Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:133:2159], cookie=333, name="Lock1") 2025-07-08T11:58:22.813361Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2025-07-08T11:58:22.824206Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:133:2159], cookie=222) 2025-07-08T11:58:22.824235Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:133:2159], cookie=333) 2025-07-08T11:58:23.004658Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:23.004697Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:23.018096Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:23.018175Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:23.053364Z node 5 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxInit::Complete 2025-07-08T11:58:23.054462Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:133:2159], cookie=6366676183626083642, path="/Root", config={ MaxUnitsPerSecond: 100 }) 2025-07-08T11:58:23.054537Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-07-08T11:58:23.065770Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:133:2159], cookie=6366676183626083642) 2025-07-08T11:58:23.065940Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:142:2166], cookie=1060691380845924370, path="/Root/Res", config={ }) 2025-07-08T11:58:23.066000Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-07-08T11:58:23.076887Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:142:2166], cookie=1060691380845924370) 2025-07-08T11:58:23.077352Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:147:2171]. Cookie: 2491087696543690455. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-07-08T11:58:23.077370Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:147:2171], cookie=2491087696543690455) 2025-07-08T11:58:23.077443Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:147:2171]. Cookie: 7489174071980147674. Data: { } 2025-07-08T11:58:23.077448Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Update quoter resources consumption state (sender=[5:147:2171], cookie=7489174071980147674) 2025-07-08T11:58:23.121183Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:147:2171]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-07-08T11:58:23.164632Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:147:2171]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-07-08T11:58:23.201190Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:147:2171]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-07-08T11:58:23.237107Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:147:2171]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-07-08T11:58:23.285095Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:147:2171]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } |63.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror [GOOD] >> TKesusTest::TestDescribeSemaphoreWatches [GOOD] >> TKesusTest::TestGetQuoterResourceCounters >> TKesusTest::TestAttachThenReRegister [GOOD] >> TKesusTest::TestAttachTimeoutTooBig >> TKesusTest::TestQuoterResourceCreation [GOOD] >> TKesusTest::TestQuoterResourceModification |63.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD] >> TPartBtreeIndexIteration::FewNodes [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSemaphoreSessionFailures [GOOD] Test command err: 2025-07-08T11:58:20.655013Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:20.655051Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:20.658849Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:20.658880Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:20.673213Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:20.673364Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:133:2159], cookie=12943739725407575599, session=0, seqNo=0) 2025-07-08T11:58:20.673416Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-07-08T11:58:20.695224Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:133:2159], cookie=12943739725407575599, session=1) 2025-07-08T11:58:20.695354Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:133:2159], cookie=5927354259503264128, session=0, seqNo=0) 2025-07-08T11:58:20.695393Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-07-08T11:58:20.713323Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:133:2159], cookie=5927354259503264128, session=2) 2025-07-08T11:58:20.713426Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:133:2159], cookie=111, name="Lock1") 2025-07-08T11:58:20.724569Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:133:2159], cookie=111) 2025-07-08T11:58:20.724674Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:133:2159], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2025-07-08T11:58:20.724756Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-07-08T11:58:20.724774Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-07-08T11:58:20.740842Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:133:2159], cookie=222) 2025-07-08T11:58:20.740970Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:133:2159], cookie=333, name="Lock1") 2025-07-08T11:58:20.752047Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:133:2159], cookie=333) 2025-07-08T11:58:21.087599Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 
72057594037927937 2025-07-08T11:58:21.087636Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:21.095130Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:21.095173Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:21.121714Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:21.121875Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:133:2159], cookie=9920027648293977402, session=0, seqNo=0) 2025-07-08T11:58:21.121916Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-07-08T11:58:21.133734Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:133:2159], cookie=9920027648293977402, session=1) 2025-07-08T11:58:21.133834Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:133:2159], cookie=16493074191343265503, session=0, seqNo=0) 2025-07-08T11:58:21.133871Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-07-08T11:58:21.145688Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:133:2159], cookie=16493074191343265503, session=2) 2025-07-08T11:58:21.145843Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[2:144:2168], cookie=1071043895698313722, name="Sem1", limit=1) 2025-07-08T11:58:21.145881Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-07-08T11:58:21.157681Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[2:144:2168], cookie=1071043895698313722) 2025-07-08T11:58:21.157775Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2159], cookie=111, session=1, semaphore="Sem1" count=1) 2025-07-08T11:58:21.157818Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-07-08T11:58:21.157862Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2159], cookie=222, session=2, semaphore="Sem1" count=1) 2025-07-08T11:58:21.169753Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2159], cookie=111) 2025-07-08T11:58:21.169782Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2159], cookie=222) 2025-07-08T11:58:21.169917Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:152:2176], cookie=16415298279725092324, name="Sem1") 2025-07-08T11:58:21.169987Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:152:2176], cookie=16415298279725092324) 2025-07-08T11:58:21.170050Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:155:2179], cookie=9735589655695592085, name="Sem1") 2025-07-08T11:58:21.170058Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:155:2179], cookie=9735589655695592085) 2025-07-08T11:58:21.170089Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[2:133:2159], cookie=333, name="Sem1") 2025-07-08T11:58:21.170121Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Sem1" waiter link 2025-07-08T11:58:21.181743Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[2:133:2159], cookie=333) 
2025-07-08T11:58:21.181917Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:160:2184], cookie=1722480306266295621, name="Sem1") 2025-07-08T11:58:21.181942Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:160:2184], cookie=1722480306266295621) 2025-07-08T11:58:21.182000Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:163:2187], cookie=12314500552226191428, name="Sem1") 2025-07-08T11:58:21.182009Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:163:2187], cookie=12314500552226191428) 2025-07-08T11:58:21.182036Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[2:133:2159], cookie=444, name="Sem1") 2025-07-08T11:58:21.182064Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2025-07-08T11:58:21.201206Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[2:133:2159], cookie=444) 2025-07-08T11:58:21.201347Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:168:2192], cookie=17762973736847696370, name="Sem1") 2025-07-08T11:58:21.201365Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:168:2192], cookie=17762973736847696370) 2025-07-08T11:58:21.201405Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:171:2195], cookie=7833267432580129927, name="Sem1") 2025-07-08T11:58:21.201409Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:171:2195], cookie=7833267432580129927) 2025-07-08T11:58:21.518969Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:21.519004Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:21.522814Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:21.522851Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:21.534138Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:21.534254Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:133:2159], cookie=4638593141941863161, name="Sem1", limit=1) 2025-07-08T11:58:21.534291Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-07-08T11:58:21.564293Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:133:2159], cookie=4638593141941863161) 2025-07-08T11:58:21.564492Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:142:2166], cookie=11742746053410429581, name="Sem2", limit=1) 2025-07-08T11:58:21.564555Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem2" 2025-07-08T11:58:21.581389Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:142:2166], cookie=11742746053410429581) 2025-07-08T11:58:21.581589Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:147:2171], cookie=13652934485404337085, name="Sem1") 2025-07-08T11:58:21.581617Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:147:2171], cookie=13652934485404337085) 2025-07-08T11:58:21.581683Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:150:2174], 
cookie=6161611710713490646, name="Sem2") 2025-07-08T11:58:21.581692Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:150:2174], cookie=6161611710713490646) 2025-07-08T11:58:21.587438Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:21.587471Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:21.587534Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:21.587659Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:21.648987Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:21.649127Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:190:2204], cookie=5082283220271957162, name="Sem1") 2025-07-08T11:58:21.649150Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:190:2204], cookie=5082283220271957162) 2025-07-08T11:58:21.649261Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:196:2209], cookie=5440148915873016254, name="Sem2") 2025-07-08T11:58:21.649268Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:196:2209], cookie=5440148915873016254) 2025-07-08T11:58:21.649322Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:199:2212], cookie=2371919878354244017, name="Sem1", limit=1) 2025-07-08T11:58:21.665238Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:199:2212], cookie=2371919878354244017) 2025-07-08T11:58:21.665428Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:204:2217], cookie=16140133801058366499, name="Sem2", limit=1) 2025-07-08T11:58:21.681502Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:204:2217], cookie=16140133801058366499) 2025-07-08T11:58:21.681685Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:209:2222], cookie=13654523795371894224, name="Sem1") 2025-07-08T11:58:21.681708Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:209:2222], cookie=1365 ... 
:KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 9 "Sem1" 2025-07-08T11:58:22.513072Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:225:2248], cookie=10762951693558794168) 2025-07-08T11:58:22.513240Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[4:230:2253], cookie=14683170193089622578, name="Sem1", force=0) 2025-07-08T11:58:22.513266Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 9 "Sem1" 2025-07-08T11:58:22.529873Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[4:230:2253], cookie=14683170193089622578) 2025-07-08T11:58:22.530069Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:235:2258], cookie=4197723563505618623, name="Sem1", limit=1) 2025-07-08T11:58:22.530142Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 10 "Sem1" 2025-07-08T11:58:22.541254Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:235:2258], cookie=4197723563505618623) 2025-07-08T11:58:22.541443Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[4:240:2263], cookie=14300384352939844563, name="Sem1", force=0) 2025-07-08T11:58:22.541472Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 10 "Sem1" 2025-07-08T11:58:22.553036Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[4:240:2263], cookie=14300384352939844563) 2025-07-08T11:58:22.553230Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:245:2268], cookie=7463744704501840190, name="Sem1", limit=1) 2025-07-08T11:58:22.553274Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 11 "Sem1" 2025-07-08T11:58:22.563969Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:245:2268], cookie=7463744704501840190) 2025-07-08T11:58:22.564101Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:133:2159], cookie=111, session=1, semaphore="Sem1" count=1) 2025-07-08T11:58:22.564144Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #1 session 1 2025-07-08T11:58:22.576366Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:133:2159], cookie=111) 2025-07-08T11:58:22.576490Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:133:2159], cookie=222, session=2, semaphore="Sem1" count=1) 2025-07-08T11:58:22.610421Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:133:2159], cookie=222) 2025-07-08T11:58:22.610571Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:133:2159], cookie=333, name="Sem1") 2025-07-08T11:58:22.610606Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 11 "Sem1" waiter link 2025-07-08T11:58:22.626741Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:133:2159], cookie=333) 2025-07-08T11:58:22.626880Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:133:2159], cookie=444, session=2, semaphore="Sem1" count=1) 2025-07-08T11:58:22.651645Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:133:2159], cookie=444) 2025-07-08T11:58:22.651786Z node 4 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:133:2159], cookie=555, name="Sem1") 2025-07-08T11:58:22.651818Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 11 "Sem1" owner link 2025-07-08T11:58:22.651831Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #3 session 2 2025-07-08T11:58:22.670300Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:133:2159], cookie=555) 2025-07-08T11:58:23.153693Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:23.153726Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:23.181156Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:23.181245Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:23.205713Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:23.205852Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:133:2159], cookie=17323144293952709420, session=0, seqNo=0) 2025-07-08T11:58:23.205890Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-07-08T11:58:23.217416Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:133:2159], cookie=17323144293952709420, session=1) 2025-07-08T11:58:23.217526Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:133:2159], cookie=112, name="Sem1", limit=5) 2025-07-08T11:58:23.217566Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-07-08T11:58:23.232410Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:133:2159], cookie=112) 2025-07-08T11:58:23.232570Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:133:2159], cookie=113, name="Sem1") 2025-07-08T11:58:23.259097Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:133:2159], cookie=113) 2025-07-08T11:58:23.259211Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:133:2159], cookie=114, name="Sem1", force=0) 2025-07-08T11:58:23.259241Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 1 "Sem1" 2025-07-08T11:58:23.273660Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:133:2159], cookie=114) 2025-07-08T11:58:23.273738Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=1 from sender=[5:133:2159], cookie=6452252757909174004 2025-07-08T11:58:23.273784Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:133:2159], cookie=115, name="Sem1", limit=5) 2025-07-08T11:58:23.285885Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:133:2159], cookie=115) 2025-07-08T11:58:23.285978Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:133:2159], cookie=116, name="Sem1") 2025-07-08T11:58:23.296940Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:133:2159], cookie=116) 2025-07-08T11:58:23.297054Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:133:2159], cookie=117, name="Sem1", force=0) 2025-07-08T11:58:23.309639Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:133:2159], 
cookie=117) 2025-07-08T11:58:23.309728Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:133:2159], cookie=118, session=1, semaphore="Sem1" count=1) 2025-07-08T11:58:23.322777Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:133:2159], cookie=118) 2025-07-08T11:58:23.322875Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:133:2159], cookie=119, name="Sem1") 2025-07-08T11:58:23.337773Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:133:2159], cookie=119) 2025-07-08T11:58:23.337868Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:133:2159], cookie=120, name="Sem1") 2025-07-08T11:58:23.337888Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:133:2159], cookie=120) 2025-07-08T11:58:23.337918Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Execute (sender=[5:133:2159], cookie=5196729738670632398, session=1) 2025-07-08T11:58:23.337961Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-07-08T11:58:23.349769Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Complete (sender=[5:133:2159], cookie=5196729738670632398) 2025-07-08T11:58:23.349875Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:133:2159], cookie=121, name="Sem1", limit=5) 2025-07-08T11:58:23.364310Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:133:2159], cookie=121) 2025-07-08T11:58:23.364392Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:133:2159], cookie=122, name="Sem1") 2025-07-08T11:58:23.375810Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:133:2159], cookie=122) 2025-07-08T11:58:23.375889Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:133:2159], cookie=123, name="Sem1", force=0) 2025-07-08T11:58:23.386854Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:133:2159], cookie=123) 2025-07-08T11:58:23.386951Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:133:2159], cookie=124, session=1, semaphore="Sem1" count=1) 2025-07-08T11:58:23.399578Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:133:2159], cookie=124) 2025-07-08T11:58:23.399674Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:133:2159], cookie=125, name="Sem1") 2025-07-08T11:58:23.420638Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:133:2159], cookie=125) 2025-07-08T11:58:23.420725Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:133:2159], cookie=126, name="Sem1") 2025-07-08T11:58:23.420744Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:133:2159], cookie=126) 2025-07-08T11:58:23.420840Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:133:2159], cookie=127, name="Sem1", limit=5) 2025-07-08T11:58:23.420848Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:133:2159], cookie=127) 2025-07-08T11:58:23.420870Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:133:2159], cookie=128, 
name="Sem1") 2025-07-08T11:58:23.420877Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:133:2159], cookie=128) 2025-07-08T11:58:23.420898Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:133:2159], cookie=129, name="Sem1", force=0) 2025-07-08T11:58:23.420904Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:133:2159], cookie=129) 2025-07-08T11:58:23.420929Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:133:2159], cookie=130, session=1, semaphore="Sem1" count=1) 2025-07-08T11:58:23.420936Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:133:2159], cookie=130) 2025-07-08T11:58:23.420972Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:133:2159], cookie=131, name="Sem1") 2025-07-08T11:58:23.420978Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:133:2159], cookie=131) 2025-07-08T11:58:23.420995Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:133:2159], cookie=132, name="Sem1") 2025-07-08T11:58:23.421000Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:133:2159], cookie=132) >> BasicUsage::ConnectToYDB [GOOD] >> BasicUsage::WriteRead >> TKesusTest::TestAttachTimeoutTooBig [GOOD] >> TKesusTest::TestCreateSemaphore >> TKesusTest::TestAcquireBeforeTimeoutViaRelease [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange >> TKesusTest::TestGetQuoterResourceCounters [GOOD] >> TKesusTest::TestCreateSemaphore [GOOD] |63.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_services/ut/unittest >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD] >> test.py::test[select-use_cluster-default.txt-Results] [GOOD] >> TKesusTest::TestQuoterResourceModification [GOOD] >> TKesusTest::TestQuoterResourceDeletion >> OperationMapping::IndexBuildRejected [GOOD] >> test.py::test[column_order-select_where-default.txt-Results] [GOOD] >> TKesusTest::TestQuoterAccountResourcesOnDemand [GOOD] >> TKesusTest::TestQuoterAccountResourcesPaced >> test.py::test[count-count_all_grouped--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_bad_syntax.py::TestBadSyntax::test_bad_syntax[v1-mvp_external_ydb_endpoint0-without_created_read_rules-modify-analytics] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=279738) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/43nv/0018da/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_bad_syntax.py.TestBadSyntax.test_bad_syntax.v1-mvp_external_ydb_endpoint0-without_created_read_rules-create-streaming/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/python/pytest/py3/_pytest/logging.py:375: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/43nv/0018da/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk4/testing_out_stuff/test_bad_syntax.py.TestBadSyntax.test_bad_syntax.v1-mvp_external_ydb_endpoint0-without_created_read_rules-create-streaming/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback >> TKesusTest::TestQuoterAccountResourcesAggregateClients [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateResources ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestGetQuoterResourceCounters [GOOD] Test command err: 2025-07-08T11:58:21.202883Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:21.202918Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:21.206672Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:21.206710Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:21.222725Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:21.222841Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:133:2159], cookie=4843529627714010196, path="/foo/bar/baz") 2025-07-08T11:58:21.246328Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:133:2159], cookie=4843529627714010196, status=SUCCESS) 2025-07-08T11:58:21.246512Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Execute (sender=[1:142:2166], cookie=13490069772163032289) 2025-07-08T11:58:21.257581Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Complete (sender=[1:142:2166], cookie=13490069772163032289) 2025-07-08T11:58:21.257737Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:147:2171], cookie=11337959356611238033, path="/foo/bar/baz") 2025-07-08T11:58:21.269837Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:147:2171], cookie=11337959356611238033, status=SUCCESS) 2025-07-08T11:58:21.269981Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Execute (sender=[1:152:2176], cookie=8779117632826014768) 2025-07-08T11:58:21.281364Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Complete (sender=[1:152:2176], cookie=8779117632826014768) 2025-07-08T11:58:21.284190Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:21.284220Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:21.284276Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 
2025-07-08T11:58:21.284401Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:21.333658Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:21.333767Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Execute (sender=[1:194:2208], cookie=14839338981254662435) 2025-07-08T11:58:21.349620Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Complete (sender=[1:194:2208], cookie=14839338981254662435) 2025-07-08T11:58:21.349769Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:202:2215], cookie=13973480955154788936, path="/foo/bar/baz") 2025-07-08T11:58:21.365360Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:202:2215], cookie=13973480955154788936, status=SUCCESS) 2025-07-08T11:58:21.365487Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:207:2220], cookie=16332035911704950888, path="/foo/bar/baz") 2025-07-08T11:58:21.365501Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:207:2220], cookie=16332035911704950888, status=PRECONDITION_FAILED) 2025-07-08T11:58:21.567871Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:21.567904Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:21.572377Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:21.572418Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:21.601685Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:21.601800Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:133:2159], cookie=13972250716015150828, name="Lock1") 2025-07-08T11:58:21.601819Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:133:2159], cookie=13972250716015150828) 2025-07-08T11:58:21.905869Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:21.905901Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:21.909600Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:21.909627Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:21.930006Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:21.930176Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:133:2159], cookie=809426383141040961, session=0, seqNo=0) 2025-07-08T11:58:21.930214Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-07-08T11:58:21.953680Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:133:2159], cookie=809426383141040961, session=1) 2025-07-08T11:58:21.953781Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:133:2159], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-07-08T11:58:21.954169Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-07-08T11:58:21.954186Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-07-08T11:58:21.973242Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:133:2159], cookie=111) 2025-07-08T11:58:21.973385Z node 3 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:144:2168], cookie=5315724118170334684, name="Lock1", force=0) 2025-07-08T11:58:21.985829Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:144:2168], cookie=5315724118170334684) 2025-07-08T11:58:21.985962Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:149:2173], cookie=9410297059403917714, name="Sem1", force=0) 2025-07-08T11:58:22.000514Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:149:2173], cookie=9410297059403917714) 2025-07-08T11:58:22.000645Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:154:2178], cookie=3747388456922608813, name="Sem1", limit=42) 2025-07-08T11:58:22.000677Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem1" 2025-07-08T11:58:22.017423Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:154:2178], cookie=3747388456922608813) 2025-07-08T11:58:22.017560Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:159:2183], cookie=18135987752130412490, name="Sem1", force=0) 2025-07-08T11:58:22.017583Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 2 "Sem1" 2025-07-08T11:58:22.028476Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:159:2183], cookie=18135987752130412490) 2025-07-08T11:58:22.028619Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:164:2188], cookie=14570353095394685595, name="Sem1", force=0) 2025-07-08T11:58:22.043512Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:164:2188], cookie=14570353095394685595) 2025-07-08T11:58:22.324085Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:22.324114Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:22.328838Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:22.328883Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:22.354237Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:22.354370Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:133:2159], cookie=7312558111329840079, session=0, seqNo=0) 2025-07-08T11:58:22.354406Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-07-08T11:58:22.369331Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:133:2159], cookie=7312558111329840079, session=1) 2025-07-08T11:58:22.369450Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:133:2159], cookie=3734244575631139906, session=0, seqNo=0) 2025-07-08T11:58:22.369489Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-07-08T11:58:22.386129Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:133:2159], cookie=3734244575631139906, session=2) 2025-07-08T11:58:22.386202Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=2 from sender=[4:133:2159], cookie=17832253856743284957 2025-07-08T11:58:22.386279Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:145:2169], cookie=4305882572482904208, name="Sem1", limit=3) 2025-07-08T11:58:22.386306Z node 4 :KESUS_TABLET DEBUG: 
[72057594037927937] Created new semaphore 1 "Sem1" 2025-07-08T11:58:22.401853Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:145:2169], cookie=4305882572482904208) 2025-07-08T11:58:22.401941Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:133:2159], cookie=112, name="Sem1") 2025-07-08T11:58:22.401957Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:133:2159], cookie=112) 2025-07-08T11:58:22.401980Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:133:2159], cookie=113, name="Sem1") 2025-07-08T11:58:22.401986Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:133:2159], cookie=113) 2025-07-08T11:58:22.402008Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:133:2159], cookie=9841906905619814207, session=2, seqNo=0) 2025-07-08T11:58:22.412934Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:133:2159], cookie=9841906905619814207, session=2) 2025-07-08T11:58:22.413022Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:133:2159], cookie=114, name="Sem1") 2025-07-08T11:58:22.413037Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:133:2159], cookie=114) 2025-07-08T11:58:22.413063Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:133:2159], cookie=115, name="Sem1") 2025-07-08T11:58:22.413067Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:133:2159], cookie=115) 2025-07-08T11:58:22.413121Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[4:152:2176], cookie=4355845173427907705, name="Sem1") 2025-07-08T11:58:22.425057Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[4:152:2176], cookie=4355845173427907705) 2025-07-08T11:58:22.425154Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:133:2159], cookie=116, session=1, semaphore="Sem1" count=1) 2025-07-08T11:58:22.425192Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-07-08T11:58:22.435904Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:133:2159], cookie=116) 2025-07-08T11:58:22.436001Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:133:2159], cookie=117, session=2, semaphore="Sem1" count=2) 2025-07-08T11:58:22.436036Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-07-08T11:58:22.449013Z node 4 :KESUS_TABLET DEBUG: [72057594037 ... 
485204Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem2" 2025-07-08T11:58:23.496214Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:203:2221], cookie=6010166361118012307) 2025-07-08T11:58:23.496334Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:133:2159], cookie=126, session=1, semaphore="Sem2" count=3) 2025-07-08T11:58:23.496382Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Sem2" queue: next order #5 session 1 2025-07-08T11:58:23.514252Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:133:2159], cookie=126) 2025-07-08T11:58:23.514392Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:133:2159], cookie=127, name="Sem2") 2025-07-08T11:58:23.514417Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:133:2159], cookie=127) 2025-07-08T11:58:23.514457Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:133:2159], cookie=128, session=1, semaphore="Sem2" count=3) 2025-07-08T11:58:23.529417Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:133:2159], cookie=128) 2025-07-08T11:58:23.930891Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:23.945044Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:23.955303Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:133:2159], cookie=129, session=1, semaphore="Sem2" count=2) 2025-07-08T11:58:23.971006Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:133:2159], cookie=129) 2025-07-08T11:58:23.971134Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:133:2159], cookie=130, name="Sem2") 2025-07-08T11:58:23.971157Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:133:2159], cookie=130) 2025-07-08T11:58:23.971194Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:133:2159], cookie=131, session=1, semaphore="Sem2" count=1) 2025-07-08T11:58:23.984840Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:133:2159], cookie=131) 2025-07-08T11:58:23.984975Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:133:2159], cookie=132, name="Sem2") 2025-07-08T11:58:23.984998Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:133:2159], cookie=132) 2025-07-08T11:58:23.985051Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:133:2159], cookie=133, name="Sem2") 2025-07-08T11:58:23.985058Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:133:2159], cookie=133) 2025-07-08T11:58:24.370851Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:24.370881Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:24.381210Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:24.381272Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:24.413608Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:24.414835Z node 5 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:133:2159], cookie=8294214293191003781, path="/Root1", config={ MaxUnitsPerSecond: 1000 }) 2025-07-08T11:58:24.414899Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root1" 2025-07-08T11:58:24.429665Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:133:2159], cookie=8294214293191003781) 2025-07-08T11:58:24.429811Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:142:2166], cookie=13032155326827923582, path="/Root1/Res", config={ }) 2025-07-08T11:58:24.429862Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root1/Res" 2025-07-08T11:58:24.445372Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:142:2166], cookie=13032155326827923582) 2025-07-08T11:58:24.445532Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:147:2171], cookie=13861256004155058293, path="/Root2", config={ MaxUnitsPerSecond: 1000 }) 2025-07-08T11:58:24.445577Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root2" 2025-07-08T11:58:24.456976Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:147:2171], cookie=13861256004155058293) 2025-07-08T11:58:24.457150Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:152:2176], cookie=122441907416856523, path="/Root2/Res", config={ }) 2025-07-08T11:58:24.457205Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 4 "Root2/Res" 2025-07-08T11:58:24.470198Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:152:2176], cookie=122441907416856523) 2025-07-08T11:58:24.470297Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:157:2181], cookie=567752354613891561, path="/Root2/Res/Subres", config={ }) 2025-07-08T11:58:24.470331Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 5 "Root2/Res/Subres" 2025-07-08T11:58:24.481064Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:157:2181], cookie=567752354613891561) 2025-07-08T11:58:24.481423Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:162:2186]. Cookie: 17064507402534708005. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-07-08T11:58:24.481438Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:162:2186], cookie=17064507402534708005) 2025-07-08T11:58:24.522406Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:162:2186]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-07-08T11:58:24.566234Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:162:2186]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-07-08T11:58:24.605161Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:162:2186]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-07-08T11:58:24.605348Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:169:2190]. Cookie: 5999191959613200583. Data: { ResourceCounters { ResourcePath: "Root2/Res" } ResourceCounters { ResourcePath: "Root2/Res/Subres" } ResourceCounters { ResourcePath: "Root2" } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2025-07-08T11:58:24.605493Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:172:2193]. Cookie: 14023518881239080062. Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-07-08T11:58:24.605504Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:172:2193], cookie=14023518881239080062) 2025-07-08T11:58:24.638083Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:172:2193]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-07-08T11:58:24.689137Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:172:2193]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-07-08T11:58:24.689307Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:177:2197]. Cookie: 18358705299438127607. Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 200 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 200 } ResourceCounters { ResourcePath: "Root2" Allocated: 200 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2025-07-08T11:58:24.689438Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:162:2186]. Cookie: 3108489989858773010. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-07-08T11:58:24.689447Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:162:2186], cookie=3108489989858773010) 2025-07-08T11:58:24.689519Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:172:2193]. Cookie: 15539914008244286644. 
Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-07-08T11:58:24.689525Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:172:2193], cookie=15539914008244286644) 2025-07-08T11:58:24.725046Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:172:2193]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 50 StateNotification { Status: SUCCESS } } } 2025-07-08T11:58:24.725074Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:162:2186]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 20 StateNotification { Status: SUCCESS } } } 2025-07-08T11:58:24.725195Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:184:2204]. Cookie: 8544941473921604996. Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 250 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 250 } ResourceCounters { ResourcePath: "Root2" Allocated: 250 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 320 } ResourceCounters { ResourcePath: "Root1" Allocated: 320 } } |63.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |63.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |63.5%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD] Test command err: 2025-07-08T11:58:24.175310Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:24.175344Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:24.180930Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:24.180979Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:24.193653Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:24.193850Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:131:2157], cookie=10617428137297140223, session=0, seqNo=0) 2025-07-08T11:58:24.193888Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-07-08T11:58:24.217657Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:131:2157], cookie=10617428137297140223, session=1) 2025-07-08T11:58:24.217769Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:132:2158], cookie=14594890071979919356, session=0, seqNo=0) 2025-07-08T11:58:24.217802Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-07-08T11:58:24.229869Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:132:2158], cookie=14594890071979919356, session=2) 2025-07-08T11:58:24.230087Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute 
(sender=[1:131:2157], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-07-08T11:58:24.230157Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-07-08T11:58:24.230172Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-07-08T11:58:24.246422Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:131:2157], cookie=111) 2025-07-08T11:58:24.246524Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:131:2157], cookie=112, session=1, semaphore="Lock2" count=1) 2025-07-08T11:58:24.246565Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-07-08T11:58:24.246580Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-07-08T11:58:24.262057Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:131:2157], cookie=112) 2025-07-08T11:58:24.262169Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=222, session=2, semaphore="Lock1" count=1) 2025-07-08T11:58:24.262266Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:131:2157], cookie=333, name="Lock1") 2025-07-08T11:58:24.262281Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2025-07-08T11:58:24.262289Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-07-08T11:58:24.262305Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:132:2158], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2025-07-08T11:58:24.277730Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=222) 2025-07-08T11:58:24.277764Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:131:2157], cookie=333) 2025-07-08T11:58:24.277773Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:132:2158], cookie=223) 2025-07-08T11:58:24.277844Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:131:2157], cookie=334, name="Lock2") 2025-07-08T11:58:24.277873Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 2 "Lock2" owner link 2025-07-08T11:58:24.277884Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-07-08T11:58:24.289660Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:131:2157], cookie=334) 2025-07-08T11:58:24.289785Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:161:2185], cookie=13212513446552898994, name="Lock1") 2025-07-08T11:58:24.289813Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:161:2185], cookie=13212513446552898994) 2025-07-08T11:58:24.289859Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:164:2188], cookie=10045259129109280987, name="Lock2") 2025-07-08T11:58:24.289865Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:164:2188], cookie=10045259129109280987) 2025-07-08T11:58:24.292246Z node 1 :KESUS_TABLET INFO: 
OnActivateExecutor: 72057594037927937 2025-07-08T11:58:24.292271Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:24.292314Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:24.292349Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:24.352156Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:24.352213Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-07-08T11:58:24.352221Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-07-08T11:58:24.352317Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:204:2218], cookie=9643139358076189754, name="Lock1") 2025-07-08T11:58:24.352335Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:204:2218], cookie=9643139358076189754) 2025-07-08T11:58:24.352436Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:212:2225], cookie=11572140788725098144, name="Lock2") 2025-07-08T11:58:24.352442Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:212:2225], cookie=11572140788725098144) 2025-07-08T11:58:24.665554Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:24.665589Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:24.669769Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:24.669815Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:24.697224Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:24.697426Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:133:2159], cookie=13962895475793489211, session=0, seqNo=0) 2025-07-08T11:58:24.697466Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-07-08T11:58:24.708586Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:133:2159], cookie=13962895475793489211, session=1) 2025-07-08T11:58:24.708674Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:134:2160], cookie=7839127088897001163, session=0, seqNo=0) 2025-07-08T11:58:24.708704Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-07-08T11:58:24.724789Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:134:2160], cookie=7839127088897001163, session=2) 2025-07-08T11:58:24.725021Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2159], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-07-08T11:58:24.725062Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-07-08T11:58:24.725078Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-07-08T11:58:24.738588Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2159], cookie=111) 2025-07-08T11:58:24.738702Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2159], cookie=112, session=1, semaphore="Lock2" count=1) 2025-07-08T11:58:24.738744Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] 
Created new ephemeral semaphore 2 "Lock2" 2025-07-08T11:58:24.738760Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-07-08T11:58:24.749567Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2159], cookie=112) 2025-07-08T11:58:24.749669Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2159], cookie=333, session=1, semaphore="Lock1" count=1) 2025-07-08T11:58:24.749731Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:134:2160], cookie=222, session=2, semaphore="Lock1" count=1) 2025-07-08T11:58:24.749748Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-07-08T11:58:24.749766Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:134:2160], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2025-07-08T11:58:24.760559Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2159], cookie=333) 2025-07-08T11:58:24.760590Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:134:2160], cookie=222) 2025-07-08T11:58:24.760596Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:134:2160], cookie=223) 2025-07-08T11:58:24.760734Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:160:2184], cookie=12312162106210483594, name="Lock1") 2025-07-08T11:58:24.760753Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:160:2184], cookie=12312162106210483594) 2025-07-08T11:58:24.760806Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:163:2187], cookie=14380642095659495284, name="Lock2") 2025-07-08T11:58:24.760816Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:163:2187], cookie=14380642095659495284) 2025-07-08T11:58:24.760859Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:166:2190], cookie=9408666304990157156, name="Lock1") 2025-07-08T11:58:24.760865Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:166:2190], cookie=9408666304990157156) 2025-07-08T11:58:24.760912Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:169:2193], cookie=11313339612670837990, name="Lock2") 2025-07-08T11:58:24.760918Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:169:2193], cookie=11313339612670837990) 2025-07-08T11:58:24.760969Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:134:2160], cookie=444, session=2, semaphore="Lock2" count=1) 2025-07-08T11:58:24.761005Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-07-08T11:58:24.771635Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:134:2160], cookie=444) 2025-07-08T11:58:24.771775Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:174:2198], cookie=12528089970055176817, name="Lock2") 2025-07-08T11:58:24.771791Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:174:2198], cookie=12528089970055176817) 2025-07-08T11:58:24.771849Z 
node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:177:2201], cookie=7543407958553289195, name="Lock2") 2025-07-08T11:58:24.771855Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:177:2201], cookie=7543407958553289195) 2025-07-08T11:58:24.774310Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:24.774335Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:24.774387Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:24.774499Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:24.822251Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:24.822310Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-07-08T11:58:24.822316Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-07-08T11:58:24.822320Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-07-08T11:58:24.822322Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-07-08T11:58:24.822403Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:217:2231], cookie=17229468808731435516, name="Lock1") 2025-07-08T11:58:24.822430Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:217:2231], cookie=17229468808731435516) 2025-07-08T11:58:24.822556Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:225:2238], cookie=15557136962562302608, name="Lock2") 2025-07-08T11:58:24.822562Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:225:2238], cookie=15557136962562302608) >> TKesusTest::TestQuoterResourceDeletion [GOOD] >> TKesusTest::TestQuoterSubscribeOnResource ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestCreateSemaphore [GOOD] Test command err: 2025-07-08T11:58:23.394917Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:23.394951Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:23.403376Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:23.403416Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:23.414805Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:23.414983Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:133:2159], cookie=12456510818700612897, session=0, seqNo=222) 2025-07-08T11:58:23.415026Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-07-08T11:58:23.441384Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:133:2159], cookie=12456510818700612897, session=1) 2025-07-08T11:58:23.441519Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:134:2160], cookie=4414110407800815191, session=1, seqNo=111) 2025-07-08T11:58:23.452879Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:134:2160], cookie=4414110407800815191, session=1) 2025-07-08T11:58:23.758308Z node 2 :KESUS_TABLET INFO: 
OnActivateExecutor: 72057594037927937 2025-07-08T11:58:23.758349Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:23.762582Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:23.762641Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:23.784583Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:23.784764Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:133:2159], cookie=111, session=0, seqNo=42) 2025-07-08T11:58:23.784806Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-07-08T11:58:23.784850Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:133:2159], cookie=222, session=1, seqNo=41) 2025-07-08T11:58:23.795674Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:133:2159], cookie=111, session=1) 2025-07-08T11:58:23.795742Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:133:2159], cookie=222, session=1) 2025-07-08T11:58:24.095931Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:24.095970Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:24.109184Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:24.109229Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:24.125678Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:24.125831Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:133:2159], cookie=5558605381382803127, session=0, seqNo=0) 2025-07-08T11:58:24.125873Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-07-08T11:58:24.157619Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:133:2159], cookie=5558605381382803127, session=1) 2025-07-08T11:58:24.157959Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:150:2174], cookie=15612524290905277702) 2025-07-08T11:58:24.157986Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:150:2174], cookie=15612524290905277702) 2025-07-08T11:58:24.305869Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:24.305905Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:24.309984Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:24.310029Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:24.332626Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:24.613559Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:24.613594Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:24.617990Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:24.618054Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:24.645282Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:24.645455Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:133:2159], cookie=14645434874322081571, session=0, seqNo=0) 2025-07-08T11:58:24.645501Z node 5 :KESUS_TABLET DEBUG: 
[72057594037927937] Created new session 1 2025-07-08T11:58:24.657924Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:133:2159], cookie=14645434874322081571, session=1) 2025-07-08T11:58:24.658032Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:133:2159], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-07-08T11:58:24.658133Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-07-08T11:58:24.658150Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-07-08T11:58:24.669673Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:133:2159], cookie=111) 2025-07-08T11:58:24.669927Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:147:2171], cookie=13487277020490484625, name="Sem1", limit=42) 2025-07-08T11:58:24.669964Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem1" 2025-07-08T11:58:24.680674Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:147:2171], cookie=13487277020490484625) 2025-07-08T11:58:24.680836Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:152:2176], cookie=14720855901769783977, name="Sem1", limit=42) 2025-07-08T11:58:24.693724Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:152:2176], cookie=14720855901769783977) 2025-07-08T11:58:24.693887Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:157:2181], cookie=2497264691564995010, name="Sem1", limit=51) 2025-07-08T11:58:24.704557Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:157:2181], cookie=2497264691564995010) 2025-07-08T11:58:24.704685Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:162:2186], cookie=4098371350113225129, name="Lock1", limit=42) 2025-07-08T11:58:24.719471Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:162:2186], cookie=4098371350113225129) 2025-07-08T11:58:24.719666Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:167:2191], cookie=15614324892454143680, name="Lock1", limit=18446744073709551615) 2025-07-08T11:58:24.733705Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:167:2191], cookie=15614324892454143680) 2025-07-08T11:58:24.733881Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:172:2196], cookie=14891185782747202974, name="Sem1") 2025-07-08T11:58:24.733914Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:172:2196], cookie=14891185782747202974) 2025-07-08T11:58:24.733982Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:175:2199], cookie=3654559628168589939, name="Sem2") 2025-07-08T11:58:24.733990Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:175:2199], cookie=3654559628168589939) 2025-07-08T11:58:24.736681Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:24.736707Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:24.736765Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 
2025-07-08T11:58:24.736936Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:24.781184Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:24.781241Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-07-08T11:58:24.781362Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:215:2229], cookie=6780880212747949635, name="Sem1") 2025-07-08T11:58:24.781384Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:215:2229], cookie=6780880212747949635) 2025-07-08T11:58:24.781506Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:222:2235], cookie=5625313839068509770, name="Sem2") 2025-07-08T11:58:24.781517Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:222:2235], cookie=5625313839068509770) |63.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildRejected [GOOD] >> TReplicationTests::CreateSequential >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed >> TKesusTest::TestQuoterSubscribeOnResource [GOOD] |63.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest >> TReplicationTests::Create >> KqpCost::OltpWriteRow-isSink >> TReplicationTests::CreateSequential [GOOD] >> TReplicationTests::CreateInParallel >> test.py::test[window-win_func_part_by_expr--Results] [GOOD] >> test.py::test[window-win_func_with_struct_access-default.txt-ForceBlocks] |63.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |63.5%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id >> TPartBtreeIndexIteration::FewNodes_Groups [GOOD] >> TPartBtreeIndexIteration::FewNodes_History >> test.py::test[join-bush_dis_in_in--ForceBlocks] [GOOD] >> test.py::test[join-bush_dis_in_in--Results] |63.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |63.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part6/pytest >> test.py::test[weak_field-weak_field_strict--Results] [GOOD] >> TReplicationTests::Create [GOOD] >> TReplicationTests::ConsistencyLevel ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterSubscribeOnResource [GOOD] Test command err: 2025-07-08T11:58:23.210534Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:23.210578Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:23.221479Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:23.221517Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:23.246319Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:23.248238Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:133:2159], cookie=8236068998379879967, path="/Root", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2025-07-08T11:58:23.248302Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter 
resource 1 "Root" 2025-07-08T11:58:23.273285Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:133:2159], cookie=8236068998379879967) 2025-07-08T11:58:23.273442Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:142:2166], cookie=13792657937061301904, path="/Root/Folder", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2025-07-08T11:58:23.273489Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Folder" 2025-07-08T11:58:23.285679Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:142:2166], cookie=13792657937061301904) 2025-07-08T11:58:23.285800Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:147:2171], cookie=12101331310320417462, path="/Root/Q1", config={ MaxUnitsPerSecond: 10 }) 2025-07-08T11:58:23.285845Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root/Q1" 2025-07-08T11:58:23.298006Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:147:2171], cookie=12101331310320417462) 2025-07-08T11:58:23.298160Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:152:2176], cookie=8038937285426764749, path="/Root/Folder/Q1", config={ MaxUnitsPerSecond: 10 }) 2025-07-08T11:58:23.298210Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1" 2025-07-08T11:58:23.312456Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:152:2176], cookie=8038937285426764749) 2025-07-08T11:58:23.312607Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:157:2181], cookie=6804041582231393749, path="/Root/Folder/Q2", config={ MaxUnitsPerSecond: 10 }) 2025-07-08T11:58:23.312653Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 5 "Root/Folder/Q2" 2025-07-08T11:58:23.329612Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:157:2181], cookie=6804041582231393749) 2025-07-08T11:58:23.329793Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:162:2186], cookie=7734023100662247834, path="/Root/Folder/Q3", config={ MaxUnitsPerSecond: 10 }) 2025-07-08T11:58:23.329845Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 6 "Root/Folder/Q3" 2025-07-08T11:58:23.345401Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:162:2186], cookie=7734023100662247834) 2025-07-08T11:58:23.345561Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:167:2191], cookie=3168506146363649077, path="/Root2", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2025-07-08T11:58:23.345612Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 7 "Root2" 2025-07-08T11:58:23.361465Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:167:2191], cookie=3168506146363649077) 2025-07-08T11:58:23.361621Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:172:2196], cookie=11898762143634892040, path="/Root2/Q", config={ MaxUnitsPerSecond: 10 }) 2025-07-08T11:58:23.361670Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 8 "Root2/Q" 2025-07-08T11:58:23.378016Z node 1 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:172:2196], cookie=11898762143634892040) 2025-07-08T11:58:23.378156Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:177:2201], cookie=3407541540746256899, ids=[100], paths=[], recursive=0) 2025-07-08T11:58:23.378174Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:177:2201], cookie=3407541540746256899) 2025-07-08T11:58:23.378226Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:180:2204], cookie=17217225016409025750, ids=[], paths=[Nonexistent/Path], recursive=0) 2025-07-08T11:58:23.378238Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:180:2204], cookie=17217225016409025750) 2025-07-08T11:58:23.378300Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:183:2207], cookie=12863945195834918933, ids=[], paths=[/Root, ], recursive=0) 2025-07-08T11:58:23.378310Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:183:2207], cookie=12863945195834918933) 2025-07-08T11:58:23.378358Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:186:2210], cookie=15657887874379062352, ids=[1, 1], paths=[], recursive=0) 2025-07-08T11:58:23.378364Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:186:2210], cookie=15657887874379062352) 2025-07-08T11:58:23.378411Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:189:2213], cookie=2375638988219831935, ids=[], paths=[/Root2/Q, /Root2/Q], recursive=0) 2025-07-08T11:58:23.378418Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:189:2213], cookie=2375638988219831935) 2025-07-08T11:58:23.378478Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:192:2216], cookie=18028345907626048175, ids=[], paths=[], recursive=1) 2025-07-08T11:58:23.378486Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:192:2216], cookie=18028345907626048175) 2025-07-08T11:58:23.378550Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:195:2219], cookie=4366608741373005675, ids=[], paths=[], recursive=0) 2025-07-08T11:58:23.378556Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:195:2219], cookie=4366608741373005675) 2025-07-08T11:58:23.379102Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:198:2222], cookie=2968320875871994429, ids=[3, 2], paths=[], recursive=1) 2025-07-08T11:58:23.379111Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:198:2222], cookie=2968320875871994429) 2025-07-08T11:58:23.379165Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:201:2225], cookie=9007078904749757818, ids=[3, 2], paths=[], recursive=0) 2025-07-08T11:58:23.379171Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:201:2225], cookie=9007078904749757818) 2025-07-08T11:58:23.379222Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:204:2228], cookie=2156788093089147015, ids=[], paths=[Root2/], recursive=1) 
2025-07-08T11:58:23.379229Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:204:2228], cookie=2156788093089147015) 2025-07-08T11:58:23.379276Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:207:2231], cookie=16153706770610036542, ids=[], paths=[Root2/], recursive=0) 2025-07-08T11:58:23.379281Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:207:2231], cookie=16153706770610036542) 2025-07-08T11:58:23.385090Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:23.385119Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:23.385180Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:23.385306Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:23.437362Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:23.437469Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:247:2261], cookie=5838951492427629682, ids=[100], paths=[], recursive=0) 2025-07-08T11:58:23.437489Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:247:2261], cookie=5838951492427629682) 2025-07-08T11:58:23.437600Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:253:2266], cookie=1819626103713004549, ids=[], paths=[Nonexistent/Path], recursive=0) 2025-07-08T11:58:23.437610Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:253:2266], cookie=1819626103713004549) 2025-07-08T11:58:23.437667Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:256:2269], cookie=17771252063811703197, ids=[], paths=[/Root, ], recursive=0) 2025-07-08T11:58:23.437678Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:256:2269], cookie=17771252063811703197) 2025-07-08T11:58:23.437732Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:259:2272], cookie=17759250760795587808, ids=[1, 1], paths=[], recursive=0) 2025-07-08T11:58:23.437737Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:259:2272], cookie=17759250760795587808) 2025-07-08T11:58:23.437783Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:262:2275], cookie=5937290318631477007, ids=[], paths=[/Root2/Q, /Root2/Q], recursive=0) 2025-07-08T11:58:23.437789Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:262:2275], cookie=5937290318631477007) 2025-07-08T11:58:23.437834Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:265:2278], cookie=5372368592006071345, ids=[], paths=[], recursive=1) 2025-07-08T11:58:23.437842Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:265:2278], cookie=5372368592006071345) 2025-07-08T11:58:23.437908Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:268:2281], cookie=16800815849539517932, ids=[], paths=[], recursive=0) 2025-07-08T11:58:23.437914Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:268:2281], cookie=16800815849539517932) 
2025-07-08T11:58:23.437972Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:271:2284], cookie=12084286778550953851, ids=[3, 2], paths=[], recursive=1) 2025-07-08T11:58:23.437978Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:271:2284], cookie=12084286778550953851) 2025-07-08T11:58:23.438028Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:274:2287], cookie=2772556493966652090, ids=[3, 2], paths=[], recursive= ... BUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:25.078331Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:25.078503Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:133:2159], cookie=5545818259891344378, path="/Root", config={ MaxUnitsPerSecond: 1 }) 2025-07-08T11:58:25.078581Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-07-08T11:58:25.089431Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:133:2159], cookie=5545818259891344378) 2025-07-08T11:58:25.089627Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:142:2166], cookie=16685989874932404198, path="/Root/Q", config={ }) 2025-07-08T11:58:25.089709Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Q" 2025-07-08T11:58:25.101216Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:142:2166], cookie=16685989874932404198) 2025-07-08T11:58:25.101424Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:147:2171], cookie=14107750951736613911, path="/Root/Folder", config={ }) 2025-07-08T11:58:25.101498Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root/Folder" 2025-07-08T11:58:25.112377Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:147:2171], cookie=14107750951736613911) 2025-07-08T11:58:25.112556Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:152:2176], cookie=6446901545813492270, path="/Root/Folder/Q1", config={ }) 2025-07-08T11:58:25.112619Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1" 2025-07-08T11:58:25.123568Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:152:2176], cookie=6446901545813492270) 2025-07-08T11:58:25.123742Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:157:2181], cookie=14732075640728694356, ids=[], paths=[], recursive=1) 2025-07-08T11:58:25.123767Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:157:2181], cookie=14732075640728694356) 2025-07-08T11:58:25.123878Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:163:2187], cookie=14453668439887384342, ids=[], paths=[], recursive=1) 2025-07-08T11:58:25.123886Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:163:2187], cookie=14453668439887384342) 2025-07-08T11:58:25.123989Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:169:2193], cookie=13463682480550714011, ids=[], paths=[], recursive=1) 2025-07-08T11:58:25.123997Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxQuoterResourceDescribe::Complete (sender=[4:169:2193], cookie=13463682480550714011) 2025-07-08T11:58:25.124055Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:172:2196], cookie=18068094837900297431, id=0, path="/Root/Folder/NonexistingRes") 2025-07-08T11:58:25.124070Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:172:2196], cookie=18068094837900297431) 2025-07-08T11:58:25.124117Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:175:2199], cookie=1608154078153121544, ids=[], paths=[], recursive=1) 2025-07-08T11:58:25.124125Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:175:2199], cookie=1608154078153121544) 2025-07-08T11:58:25.124176Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:178:2202], cookie=4658111105831268051, id=100, path="") 2025-07-08T11:58:25.124183Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:178:2202], cookie=4658111105831268051) 2025-07-08T11:58:25.124236Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:181:2205], cookie=13727538892764200907, ids=[], paths=[], recursive=1) 2025-07-08T11:58:25.124244Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:181:2205], cookie=13727538892764200907) 2025-07-08T11:58:25.124298Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:184:2208], cookie=2198615443408277888, id=3, path="") 2025-07-08T11:58:25.124311Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:184:2208], cookie=2198615443408277888) 2025-07-08T11:58:25.124365Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:187:2211], cookie=6088293959260647963, ids=[], paths=[], recursive=1) 2025-07-08T11:58:25.124373Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:187:2211], cookie=6088293959260647963) 2025-07-08T11:58:25.124434Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:190:2214], cookie=2232348325671234008, id=0, path="/Root/Folder/Q1") 2025-07-08T11:58:25.124479Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleted quoter resource 4 "Root/Folder/Q1" 2025-07-08T11:58:25.137751Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:190:2214], cookie=2232348325671234008) 2025-07-08T11:58:25.137911Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:195:2219], cookie=10492163106395569247, ids=[], paths=[], recursive=1) 2025-07-08T11:58:25.137929Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:195:2219], cookie=10492163106395569247) 2025-07-08T11:58:25.140387Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:25.140419Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:25.140479Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:25.140542Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:25.190248Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:25.190342Z node 4 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:235:2249], cookie=11038383879201498202, ids=[], paths=[], recursive=1) 2025-07-08T11:58:25.190360Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:235:2249], cookie=11038383879201498202) 2025-07-08T11:58:25.190475Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:241:2254], cookie=12763457226424542719, id=3, path="") 2025-07-08T11:58:25.190504Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleted quoter resource 3 "Root/Folder" 2025-07-08T11:58:25.201230Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:241:2254], cookie=12763457226424542719) 2025-07-08T11:58:25.201382Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:246:2259], cookie=1531636932169307868, ids=[], paths=[], recursive=1) 2025-07-08T11:58:25.201404Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:246:2259], cookie=1531636932169307868) 2025-07-08T11:58:25.203923Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:25.203951Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:25.203994Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:25.204109Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:25.252027Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:25.252172Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:286:2289], cookie=18438010281543366597, ids=[], paths=[], recursive=1) 2025-07-08T11:58:25.252200Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:286:2289], cookie=18438010281543366597) 2025-07-08T11:58:25.881566Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:25.881602Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:25.905170Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:25.905254Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:25.933683Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:25.933800Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:133:2159], cookie=1150477609810407272, path="/Q1", config={ MaxUnitsPerSecond: 10 }) 2025-07-08T11:58:25.933846Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Q1" 2025-07-08T11:58:25.949275Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:133:2159], cookie=1150477609810407272) 2025-07-08T11:58:25.949445Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:142:2166], cookie=7149023739784601002, path="/Q2", config={ MaxUnitsPerSecond: 10 }) 2025-07-08T11:58:25.949491Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Q2" 2025-07-08T11:58:25.961603Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:142:2166], cookie=7149023739784601002) 2025-07-08T11:58:25.962022Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:147:2171]. Cookie: 15830970417017539526. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-07-08T11:58:25.962036Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:147:2171], cookie=15830970417017539526) 2025-07-08T11:58:25.962153Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:147:2171]. Cookie: 13150407523074092283. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Q2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { Error { Status: NOT_FOUND Issues { message: "Resource \"/Q3\" doesn\'t exist." } } } ProtocolVersion: 1 } 2025-07-08T11:58:25.962160Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:147:2171], cookie=13150407523074092283) |63.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/pytest >> test.py::test[select-use_cluster-default.txt-Results] [GOOD] >> TKesusTest::TestQuoterAccountResourcesPaced [GOOD] >> TKesusTest::TestQuoterAccountResourcesDeduplicateClient >> KqpCost::OlapRangeFullScan >> TReplicationTests::CreateInParallel [GOOD] >> TReplicationTests::CreateDropRecreate |63.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest >> TKesusTest::TestQuoterAccountResourcesAggregateResources [GOOD] >> TKesusTest::TestQuoterAccountLabels >> TCdcStreamTests::MeteringDedicated [GOOD] >> TCdcStreamTests::ChangeOwner >> KqpCost::ScanScriptingRangeFullScan+SourceRead >> TReplicationTests::ConsistencyLevel [GOOD] >> TReplicationTests::Alter |63.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest >> TReplicationTests::CreateDropRecreate [GOOD] >> TReplicationTests::CreateWithoutCredentials >> test.py::test[pg-tpch-q15-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q17-default.txt-Results] >> KqpCost::OltpWriteRow-isSink [GOOD] >> TReplicationTests::Alter [GOOD] >> TReplicationTests::CannotAddReplicationConfig |63.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanQueryRangeFullScan+SourceRead >> YdbIndexTable::MultiShardTableOneIndex [GOOD] >> YdbIndexTable::MultiShardTableOneIndexDataColumn >> TCdcStreamTests::ChangeOwner [GOOD] >> TCdcStreamTests::DropIndexWithStream >> TReplicationTests::CreateWithoutCredentials [GOOD] >> TReplicationTests::Describe |63.6%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest >> TPartBtreeIndexIteration::FewNodes_History [GOOD] >> TPartBtreeIndexIteration::FewNodes_Sticky >> TReplicationTests::CannotAddReplicationConfig [GOOD] >> TReplicationTests::CannotSetAsyncReplicaAttribute ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest >> KqpCost::OltpWriteRow-isSink [GOOD] Test command err: Trying to start YDB, gRPC: 63976, MsgBus: 61616 2025-07-08T11:58:26.386850Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679221492552754:2142];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000b7f/r3tmp/tmpKXAdq1/pdisk_1.dat 2025-07-08T11:58:26.426680Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T11:58:26.452377Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63976, node 1 2025-07-08T11:58:26.523172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:26.523199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:26.523641Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:58:26.523649Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:58:26.523651Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:58:26.523695Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T11:58:26.525261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61616 TClient is connected to server localhost:61616 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:58:26.587161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:26.590924Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:58:26.597919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T11:58:26.685987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:26.730693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:26.796567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:26.998016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:58:27.018004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:58:27.073760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T11:58:27.090833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T11:58:27.154172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T11:58:27.178960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T11:58:27.206573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T11:58:27.387242Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:58:27.412266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 query_phases { duration_us: 156 cpu_time_us: 156 } query_phases { duration_us: 1787 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 338 affected_shards: 1 } compilation { duration_us: 16728 cpu_time_us: 14514 } process_cpu_time_us: 252 total_duration_us: 19645 total_cpu_time_us: 15260 query_phases { duration_us: 117 cpu_time_us: 117 } query_phases { duration_us: 1575 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 357 affected_shards: 1 } compilation { duration_us: 9784 cpu_time_us: 8797 } process_cpu_time_us: 216 total_duration_us: 12311 
total_cpu_time_us: 9487 2025-07-08T11:58:27.524150Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7524679225787522626:2481], TxId: 281474976715675, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZDYzNjg2Y2EtZjEyZDNjNmItMzQ3YmUwMGUtYjE1NTQxMjA=. TraceId : 01jzmydm5005sexxvkampf1k2d. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : . }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-07-08T11:58:27.524355Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7524679225787522627:2482], TxId: 281474976715675, task: 2. Ctx: { TraceId : 01jzmydm5005sexxvkampf1k2d. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZDYzNjg2Y2EtZjEyZDNjNmItMzQ3YmUwMGUtYjE1NTQxMjA=. CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. Handle abort execution event from: [1:7524679225787522623:2446], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-07-08T11:58:27.524420Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDYzNjg2Y2EtZjEyZDNjNmItMzQ3YmUwMGUtYjE1NTQxMjA=, ActorId: [1:7524679225787522455:2446], ActorState: ExecuteState, TraceId: 01jzmydm5005sexxvkampf1k2d, Create QueryResponse for error on request, msg: query_phases { duration_us: 230 cpu_time_us: 230 } query_phases { duration_us: 1448 table_access { name: "/Root/TestTable" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 937 affected_shards: 1 } query_phases { duration_us: 2421 cpu_time_us: 2236 } compilation { duration_us: 24748 cpu_time_us: 21799 } process_cpu_time_us: 452 total_duration_us: 35647 total_cpu_time_us: 25654 query_phases { duration_us: 159 cpu_time_us: 159 } query_phases { duration_us: 862 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 750 affected_shards: 1 } query_phases { duration_us: 468 cpu_time_us: 508 } query_phases { duration_us: 1892 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 371 affected_shards: 1 } compilation { duration_us: 29076 cpu_time_us: 27277 } process_cpu_time_us: 435 total_duration_us: 33731 total_cpu_time_us: 29500 query_phases { duration_us: 183 cpu_time_us: 183 } query_phases { duration_us: 1427 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 820 affected_shards: 1 } query_phases { duration_us: 477 cpu_time_us: 188 affected_shards: 1 } compilation { duration_us: 26009 cpu_time_us: 24823 } process_cpu_time_us: 406 total_duration_us: 29547 total_cpu_time_us: 26420 query_phases { duration_us: 237 cpu_time_us: 237 } query_phases { duration_us: 1251 table_access { name: "/Root/TestTable" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 786 affected_shards: 1 } query_phases { duration_us: 2398 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 401 affected_shards: 1 } compilation { duration_us: 28090 cpu_time_us: 26583 } process_cpu_time_us: 469 total_duration_us: 35681 total_cpu_time_us: 28476 query_phases { duration_us: 124 cpu_time_us: 124 } query_phases { duration_us: 1464 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 314 affected_shards: 1 } compilation { duration_us: 13039 cpu_time_us: 9463 } process_cpu_time_us: 214 total_duration_us: 15746 total_cpu_time_us: 10115 query_phases { duration_us: 114 cpu_time_us: 114 } query_phases { duration_us: 1495 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 320 affected_shards: 1 } compilation { duration_us: 9820 cpu_time_us: 8775 } process_cpu_time_us: 397 total_duration_us: 12441 total_cpu_time_us: 9606 >> TReplicationTests::Describe [GOOD] >> TReplicationTests::CreateReplicatedTable >> KqpCost::IndexLookupJoin-StreamLookupJoin >> KqpCost::OlapRangeFullScan [GOOD] >> KqpCost::QuerySeviceRangeFullScan >> TReplicationTests::CannotSetAsyncReplicaAttribute [GOOD] >> TReplicationTests::AlterReplicatedTable |63.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |63.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest |63.6%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |63.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring >> 
TPartBtreeIndexIteration::FewNodes_Sticky [GOOD] >> TPartBtreeIndexIteration::FewNodes_Slices >> TCdcStreamTests::DropIndexWithStream [GOOD] >> TCdcStreamTests::DropTableWithIndexWithStream |63.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest >> TReplicationTests::AlterReplicatedTable [GOOD] >> TReplicationTests::AlterReplicatedIndexTable >> test.py::test[aggregate-group_by_gs_flatten_columns-default.txt-Results] [GOOD] >> KqpCost::ScanQueryRangeFullScan+SourceRead [GOOD] >> KqpCost::ScanScriptingRangeFullScan+SourceRead [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapRangeFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 13318, MsgBus: 7882 2025-07-08T11:58:27.402830Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679224969957912:2239];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:27.405096Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000b6d/r3tmp/tmp7sTSEo/pdisk_1.dat 2025-07-08T11:58:27.466567Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13318, node 1 2025-07-08T11:58:27.485143Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:58:27.485153Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:58:27.485155Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:58:27.485190Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7882 TClient is connected to server localhost:7882 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:58:27.538969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:27.538995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:27.539690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:58:27.540540Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-07-08T11:58:27.553579Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:58:27.589681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:27.678987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:27.718039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:27.741844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:27.842832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:58:27.868277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:58:27.889544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T11:58:27.926395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T11:58:27.946447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T11:58:27.975998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T11:58:28.042069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T11:58:28.346517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T11:58:28.391763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7524679229264927650:2459];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:58:28.391837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7524679229264927650:2459];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:28.391884Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037925;self_id=[1:7524679229264927650:2459];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:28.391903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7524679229264927650:2459];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:28.391923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7524679229264927650:2459];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:28.391942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7524679229264927650:2459];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:58:28.391962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7524679229264927650:2459];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:28.391982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7524679229264927650:2459];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:28.392001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7524679229264927650:2459];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:58:28.392023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7524679229264927650:2459];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:28.392041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7524679229264927650:2459];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:58:28.392062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7524679229264927650:2459];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:58:28.392266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7524679229264927648:2458];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:58:28.392280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7524679229264927648:2458];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:28.392328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7524679229264927648:2458];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:28.392347Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037922;self_id=[1:7524679229264927648:2458];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:28.392364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7524679229264927648:2458];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:28.392386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7524679229264927648:2458];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:58:28.392404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7524679229264927648:2458];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:28.392423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7524679229264927648:2458];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:28.392441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7524679229264927648:2458];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:58:28.392459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7524679229264927648:2458];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:28.392477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7524679229264927648:2458];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_r ... 
ct.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:58:28.433551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:58:28.433558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:58:28.433565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:58:28.433588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:58:28.433592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:58:28.433610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:58:28.433615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:58:28.433626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:58:28.433632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:58:28.433638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:58:28.433643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:58:28.433648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:58:28.433696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:58:28.433701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:58:28.433712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:58:28.433716Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-07-08T11:58:28.434278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7524679229264927648:2458];ev=NActors::IEventHandle;tablet_id=72075186224037922;tx_id=281474976715670;this=94638406531680;method=TTxController::StartProposeOnExecute;tx_info=281474976715670:TX_KIND_SCHEMA;min=1751975908434;max=18446744073709551615;plan=0;src=[1:7524679224969958097:2199];cookie=352:12;;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-07-08T11:58:28.434358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7524679229264927650:2459];ev=NActors::IEventHandle;tablet_id=72075186224037925;tx_id=281474976715670;this=94638406374720;method=TTxController::StartProposeOnExecute;tx_info=281474976715670:TX_KIND_SCHEMA;min=1751975908434;max=18446744073709551615;plan=0;src=[1:7524679224969958097:2199];cookie=382:12;;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-07-08T11:58:28.434488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7524679229264927639:2454];ev=NActors::IEventHandle;tablet_id=72075186224037926;tx_id=281474976715670;this=94638406531360;method=TTxController::StartProposeOnExecute;tx_info=281474976715670:TX_KIND_SCHEMA;min=1751975908434;max=18446744073709551615;plan=0;src=[1:7524679224969958097:2199];cookie=392:12;;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-07-08T11:58:28.435646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7524679229264927637:2452];ev=NActors::IEventHandle;tablet_id=72075186224037930;tx_id=281474976715670;this=94638406532960;method=TTxController::StartProposeOnExecute;tx_info=281474976715670:TX_KIND_SCHEMA;min=1751975908431;max=18446744073709551615;plan=0;src=[1:7524679224969958097:2199];cookie=432:12;;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-07-08T11:58:28.437770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7524679229264927638:2453];ev=NActors::IEventHandle;tablet_id=72075186224037928;tx_id=281474976715670;this=94638406531360;method=TTxController::StartProposeOnExecute;tx_info=281474976715670:TX_KIND_SCHEMA;min=1751975908437;max=18446744073709551615;plan=0;src=[1:7524679224969958097:2199];cookie=412:12;;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-07-08T11:58:28.443054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715670;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-07-08T11:58:28.445251Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715670;fline=tx_controller.cpp:215;event=finished_tx;tx_id=281474976715670; 2025-07-08T11:58:28.445406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715670;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-07-08T11:58:28.446278Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715670;fline=tx_controller.cpp:215;event=finished_tx;tx_id=281474976715670; 2025-07-08T11:58:28.446375Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715670;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-07-08T11:58:28.447319Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715670;fline=tx_controller.cpp:215;event=finished_tx;tx_id=281474976715670; 2025-07-08T11:58:28.447411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715670;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-07-08T11:58:28.448665Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715670;fline=tx_controller.cpp:215;event=finished_tx;tx_id=281474976715670; 2025-07-08T11:58:28.448763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715670;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-07-08T11:58:28.449215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715670;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-07-08T11:58:28.450552Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715670;fline=tx_controller.cpp:215;event=finished_tx;tx_id=281474976715670; 2025-07-08T11:58:28.450751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715670;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-07-08T11:58:28.451564Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715670;fline=tx_controller.cpp:215;event=finished_tx;tx_id=281474976715670; 2025-07-08T11:58:28.451985Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715670;fline=tx_controller.cpp:215;event=finished_tx;tx_id=281474976715670; 2025-07-08T11:58:28.452051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715670;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-07-08T11:58:28.452219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715670;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-07-08T11:58:28.453109Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715670;fline=tx_controller.cpp:215;event=finished_tx;tx_id=281474976715670; 2025-07-08T11:58:28.453369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715670;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-07-08T11:58:28.454540Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715670;fline=tx_controller.cpp:215;event=finished_tx;tx_id=281474976715670; 2025-07-08T11:58:28.456905Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715670;fline=tx_controller.cpp:215;event=finished_tx;tx_id=281474976715670; 2025-07-08T11:58:28.494877Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:215;event=finished_tx;tx_id=281474976715672; 2025-07-08T11:58:28.494967Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:215;event=finished_tx;tx_id=281474976715672; 2025-07-08T11:58:28.495224Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037931;self_id=[1:7524679229264927666:2461];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037931;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037926;receive=72075186224037930; 2025-07-08T11:58:28.495270Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:215;event=finished_tx;tx_id=281474976715672; query_phases { duration_us: 14850 table_access { name: "/Root/TestTable" reads { rows: 2 bytes: 72 } } cpu_time_us: 3287 } compilation { duration_us: 27699 cpu_time_us: 26757 } process_cpu_time_us: 108 total_duration_us: 44483 total_cpu_time_us: 30152 >> test.py::test[aggregate-group_by_hop_compact--Results] >> TReplicationTests::CreateReplicatedTable [GOOD] >> TReplicationTests::DropReplicationWithInvalidCredentials >> TKesusTest::TestQuoterAccountResourcesDeduplicateClient [GOOD] >> TKesusTest::TestQuoterAccountResourcesForgetClient >> test.py::test[aggregate-group_by_hop_compact--Results] [SKIPPED] >> test.py::test[aggregate-group_by_mul_gs_gs--Results] >> TKesusTest::TestQuoterAccountLabels [GOOD] >> TKesusTest::TestPassesUpdatedPropsToSession >> KqpCost::QuerySeviceRangeFullScan [GOOD] >> TCdcStreamTests::DropTableWithIndexWithStream [GOOD] >> TReplicationTests::AlterReplicatedIndexTable [GOOD] >> TReplicationTests::CopyReplicatedTable >> TKesusTest::TestPassesUpdatedPropsToSession [GOOD] >> KqpCost::OlapPointLookup >> TReplicationTests::DropReplicationWithInvalidCredentials [GOOD] >> TReplicationTests::DropReplicationWithUnknownSecret ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanQueryRangeFullScan+SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 3337, MsgBus: 65329 2025-07-08T11:58:28.341633Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679229352014523:2245];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:28.345869Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000b39/r3tmp/tmp8XrhZa/pdisk_1.dat 2025-07-08T11:58:28.409770Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3337, node 1 2025-07-08T11:58:28.441560Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:58:28.441572Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:58:28.441575Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: 
(empty maybe) 2025-07-08T11:58:28.441619Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65329 2025-07-08T11:58:28.466390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:28.466416Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:28.469702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:65329 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T11:58:28.525104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:58:28.527925Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:28.540615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T11:58:28.566337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:28.586491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:28.600108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:58:28.765296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:58:28.801717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:58:28.819086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T11:58:28.846485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T11:58:28.868108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T11:58:28.933566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T11:58:28.956821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T11:58:29.197482Z node 1 :KQP_GATEWAY DEBUG: Load table metadata from cache by path, request Path: /Root/Test 2025-07-08T11:58:29.218628Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpSnapshotManager at [1:7524679233646984095:2446] 2025-07-08T11:58:29.218647Z node 1 :KQP_RESOURCE_MANAGER DEBUG: KqpSnapshotManager: got snapshot request from [1:7524679233646984082:2446] 2025-07-08T11:58:29.229640Z node 1 :KQP_RESOURCE_MANAGER DEBUG: KqpSnapshotManager: snapshot 1751975909271:281474976715670 created 2025-07-08T11:58:29.229774Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7524679233646984106:2446] TxId: 281474976715671. Ctx: { TraceId: 01jzmydnta39vrawdka93w5zye, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjJiZjNlODMtY2RjYmNmNTktODdiMDIzMWItMTcxMDllZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Executing physical tx, type: 3, stages: 2 2025-07-08T11:58:29.229796Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 1, OutputsCount: 1 2025-07-08T11:58:29.229800Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,1], InputsCount: 1, OutputsCount: 1 2025-07-08T11:58:29.229904Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715671. Resolved key sets: 1 2025-07-08T11:58:29.229950Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715671. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 9] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-07-08T11:58:29.229963Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7524679233646984106:2446] TxId: 281474976715671. 
Ctx: { TraceId: 01jzmydnta39vrawdka93w5zye, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjJiZjNlODMtY2RjYmNmNTktODdiMDIzMWItMTcxMDllZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Start resolving tablets nodes... (1) 2025-07-08T11:58:29.230029Z node 1 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976715671. Shard resolve complete, resolved shards: 1 2025-07-08T11:58:29.230040Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7524679233646984106:2446] TxId: 281474976715671. Ctx: { TraceId: 01jzmydnta39vrawdka93w5zye, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjJiZjNlODMtY2RjYmNmNTktODdiMDIzMWItMTcxMDllZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Shards nodes resolved, success: 1, failed: 0 2025-07-08T11:58:29.230048Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7524679233646984106:2446] TxId: 281474976715671. Ctx: { TraceId: 01jzmydnta39vrawdka93w5zye, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjJiZjNlODMtY2RjYmNmNTktODdiMDIzMWItMTcxMDllZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Shards on nodes: node 1: [72075186224037914] 2025-07-08T11:58:29.230058Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-07-08T11:58:29.230080Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7524679233646984106:2446] TxId: 281474976715671. Ctx: { TraceId: 01jzmydnta39vrawdka93w5zye, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjJiZjNlODMtY2RjYmNmNTktODdiMDIzMWItMTcxMDllZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (return (lambda '($1) (block '( (let $2 (lambda '($5) (block '( (let $6 (Member $5 '"Amount")) (return $6 (Member $5 '"Comment") (Member $5 '"Group") (Member $5 '"Name") (Coalesce (< $6 (Uint64 '"5000")) (Bool 'false))) )))) (let $3 (WideFilter (ExpandMap (ToFlow $1) $2) (lambda '($7 $8 $9 $10 $11) $11) (Uint64 '1))) (let $4 (lambda '($12 $13 $14 $15 $16) $12 $13 $14 $15)) (return (FromFlow (WideMap $3 $4))) )))) ) 2025-07-08T11:58:29.230150Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7524679233646984106:2446] TxId: 281474976715671. Ctx: { TraceId: 01jzmydnta39vrawdka93w5zye, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjJiZjNlODMtY2RjYmNmNTktODdiMDIzMWItMTcxMDllZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,1] AST: ( (return (lambda '($1) (FromFlow (NarrowMap (Take (ToFlow $1) (Uint64 '1)) (lambda '($2 $3 $4 $5) (AsStruct '('"Amount" $2) '('"Comment" $3) '('"Group" $4) '('"Name" $5))))))) ) 2025-07-08T11:58:29.230165Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7524679233646984106:2446] TxId: 281474976715671. Ctx: { TraceId: 01jzmydnta39vrawdka93w5zye, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjJiZjNlODMtY2RjYmNmNTktODdiMDIzMWItMTcxMDllZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,1] create compute task: 2 2025-07-08T11:58:29.230174Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715671. Stage [0,1] create channelId: 1 from task: 1 to task: 2 of type Merge/Map without spilling 2025-07-08T11:58:29.230182Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 2 from task: 2 with index: 0 2025-07-08T11:58:29.230189Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7524679233646984106:2446] TxId: 281474976715671. 
Ctx: { TraceId: 01jzmydnta39vrawdka93w5zye, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjJiZjNlODMtY2RjYmNmNTktODdiMDIzMWItMTcxMDllZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TotalShardScans: 0 2025-07-08T11:58:29.230203Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715671. Ctx: { TraceId: 01jzmydnta39vrawdka93w5zye, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjJiZjNlODMtY2RjYmNmNTktODdiMDIzMWItMTcxMDllZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, localComputeTasks: 0, snapshot: {281474976715670, 1751975909271} 2025-07-08T11:58:29.230316Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:148;event=channel_info;ch_size=8388608;ch_count=2;ch_limit=8388608;inputs=1;input_channels_count=0; 2025-07-08T11:58:29.230392Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:148;event=channel_info;ch_s ... d=1&id=MjJiZjNlODMtY2RjYmNmNTktODdiMDIzMWItMTcxMDllZjA=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : . }. CA StateFunc 271646922 2025-07-08T11:58:29.232757Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715671, task: 1, CA Id [1:7524679233646984110:2450]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-07-08T11:58:29.232761Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715671, task: 1, CA Id [1:7524679233646984110:2450]. enter pack cells method shardId: 72075186224037914 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-07-08T11:58:29.232776Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715671, task: 1, CA Id [1:7524679233646984110:2450]. exit pack cells method shardId: 72075186224037914 processedRows: 0 packed rows: 3 freeSpace: 8388548 2025-07-08T11:58:29.232782Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715671, task: 1, CA Id [1:7524679233646984110:2450]. returned 3 rows; processed 3 rows 2025-07-08T11:58:29.232810Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715671, task: 1, CA Id [1:7524679233646984110:2450]. dropping batch for read #0 2025-07-08T11:58:29.232811Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715671, task: 1, CA Id [1:7524679233646984110:2450]. effective maxinflight 1 sorted 1 2025-07-08T11:58:29.232813Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715671, task: 1, CA Id [1:7524679233646984110:2450]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-07-08T11:58:29.232815Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715671, task: 1, CA Id [1:7524679233646984110:2450]. returned async data processed rows 3 left freeSpace 8388548 received rows 3 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-07-08T11:58:29.232889Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679233646984110:2450], TxId: 281474976715671, task: 1. Ctx: { TraceId : 01jzmydnta39vrawdka93w5zye. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MjJiZjNlODMtY2RjYmNmNTktODdiMDIzMWItMTcxMDllZjA=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-07-08T11:58:29.232892Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679233646984110:2450], TxId: 281474976715671, task: 1. Ctx: { TraceId : 01jzmydnta39vrawdka93w5zye. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MjJiZjNlODMtY2RjYmNmNTktODdiMDIzMWItMTcxMDllZjA=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : . }. 
CA StateFunc 271646922 2025-07-08T11:58:29.232899Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715671, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-07-08T11:58:29.232904Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679233646984111:2451], TxId: 281474976715671, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=MjJiZjNlODMtY2RjYmNmNTktODdiMDIzMWItMTcxMDllZjA=. CustomerSuppliedId : . TraceId : 01jzmydnta39vrawdka93w5zye. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : . }. CA StateFunc 271646923 2025-07-08T11:58:29.232914Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715671, task: 2. Finish input channelId: 1, from: [1:7524679233646984110:2450] 2025-07-08T11:58:29.232922Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679233646984111:2451], TxId: 281474976715671, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=MjJiZjNlODMtY2RjYmNmNTktODdiMDIzMWItMTcxMDllZjA=. CustomerSuppliedId : . TraceId : 01jzmydnta39vrawdka93w5zye. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : . }. CA StateFunc 271646922 2025-07-08T11:58:29.232980Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679233646984111:2451], TxId: 281474976715671, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=MjJiZjNlODMtY2RjYmNmNTktODdiMDIzMWItMTcxMDllZjA=. CustomerSuppliedId : . TraceId : 01jzmydnta39vrawdka93w5zye. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-07-08T11:58:29.232984Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679233646984110:2450], TxId: 281474976715671, task: 1. Ctx: { TraceId : 01jzmydnta39vrawdka93w5zye. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MjJiZjNlODMtY2RjYmNmNTktODdiMDIzMWItMTcxMDllZjA=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : . }. CA StateFunc 271646927 2025-07-08T11:58:29.232989Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679233646984110:2450], TxId: 281474976715671, task: 1. Ctx: { TraceId : 01jzmydnta39vrawdka93w5zye. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MjJiZjNlODMtY2RjYmNmNTktODdiMDIzMWItMTcxMDllZjA=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : . }. CA StateFunc 271646922 2025-07-08T11:58:29.232992Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715671, task: 1. Tasks execution finished 2025-07-08T11:58:29.233005Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679233646984110:2450], TxId: 281474976715671, task: 1. Ctx: { TraceId : 01jzmydnta39vrawdka93w5zye. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MjJiZjNlODMtY2RjYmNmNTktODdiMDIzMWItMTcxMDllZjA=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : . }. Compute state finished. All channels and sinks finished 2025-07-08T11:58:29.233031Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715671, task: 1. pass away 2025-07-08T11:58:29.233063Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715671;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-07-08T11:58:29.233075Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7524679233646984106:2446] TxId: 281474976715671. Ctx: { TraceId: 01jzmydnta39vrawdka93w5zye, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjJiZjNlODMtY2RjYmNmNTktODdiMDIzMWItMTcxMDllZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Send TEvStreamData to [1:7524679233646984082:2446], seqNo: 1, nRows: 1 2025-07-08T11:58:29.233104Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715671, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-07-08T11:58:29.233128Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7524679233646984106:2446] TxId: 281474976715671. Ctx: { TraceId: 01jzmydnta39vrawdka93w5zye, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjJiZjNlODMtY2RjYmNmNTktODdiMDIzMWItMTcxMDllZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: ExecuteState, got execution state from compute actor: [1:7524679233646984110:2450], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 2203 Tasks { TaskId: 1 CpuTimeUs: 288 FinishTimeMs: 1751975909232 OutputRows: 1 OutputBytes: 19 Tables { TablePath: "/Root/Test" ReadRows: 1 ReadBytes: 20 AffectedPartitions: 1 } IngressRows: 3 ComputeCpuTimeUs: 42 BuildCpuTimeUs: 246 HostName: "ghrun-3z2hjo4icm" NodeId: 1 StartTimeMs: 1751975909232 CreateTimeMs: 1751975909230 UpdateTimeMs: 1751975909233 } MaxMemoryUsage: 1048576 } 2025-07-08T11:58:29.233153Z node 1 :KQP_EXECUTER INFO: TxId: 281474976715671. Ctx: { TraceId: 01jzmydnta39vrawdka93w5zye, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjJiZjNlODMtY2RjYmNmNTktODdiMDIzMWItMTcxMDllZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Compute actor has finished execution: [1:7524679233646984110:2450] 2025-07-08T11:58:29.233168Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7524679233646984106:2446] TxId: 281474976715671. Ctx: { TraceId: 01jzmydnta39vrawdka93w5zye, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjJiZjNlODMtY2RjYmNmNTktODdiMDIzMWItMTcxMDllZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Waiting for: CA [1:7524679233646984111:2451], 2025-07-08T11:58:29.233765Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715671, send ack to channelId: 2, seqNo: 1, enough: 0, freeSpace: 8388488, to: [1:7524679233646984113:2451] 2025-07-08T11:58:29.233784Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679233646984111:2451], TxId: 281474976715671, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=MjJiZjNlODMtY2RjYmNmNTktODdiMDIzMWItMTcxMDllZjA=. CustomerSuppliedId : . TraceId : 01jzmydnta39vrawdka93w5zye. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : . }. CA StateFunc 271646922 2025-07-08T11:58:29.233796Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715671, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-07-08T11:58:29.233801Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715671, task: 2. Tasks execution finished 2025-07-08T11:58:29.233803Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679233646984111:2451], TxId: 281474976715671, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=MjJiZjNlODMtY2RjYmNmNTktODdiMDIzMWItMTcxMDllZjA=. CustomerSuppliedId : . TraceId : 01jzmydnta39vrawdka93w5zye. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : . }. Compute state finished. 
All channels and sinks finished 2025-07-08T11:58:29.233816Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715671, task: 2. pass away 2025-07-08T11:58:29.233832Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715671;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-07-08T11:58:29.233857Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7524679233646984106:2446] TxId: 281474976715671. Ctx: { TraceId: 01jzmydnta39vrawdka93w5zye, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjJiZjNlODMtY2RjYmNmNTktODdiMDIzMWItMTcxMDllZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: ExecuteState, got execution state from compute actor: [1:7524679233646984111:2451], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1845 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 287 FinishTimeMs: 1751975909233 InputRows: 1 InputBytes: 19 OutputRows: 1 OutputBytes: 19 ResultRows: 1 ResultBytes: 19 ComputeCpuTimeUs: 66 BuildCpuTimeUs: 221 HostName: "ghrun-3z2hjo4icm" NodeId: 1 CreateTimeMs: 1751975909230 UpdateTimeMs: 1751975909233 } MaxMemoryUsage: 1048576 } 2025-07-08T11:58:29.233866Z node 1 :KQP_EXECUTER INFO: TxId: 281474976715671. Ctx: { TraceId: 01jzmydnta39vrawdka93w5zye, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjJiZjNlODMtY2RjYmNmNTktODdiMDIzMWItMTcxMDllZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Compute actor has finished execution: [1:7524679233646984111:2451] 2025-07-08T11:58:29.233867Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715671, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-07-08T11:58:29.233898Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7524679233646984106:2446] TxId: 281474976715671. Ctx: { TraceId: 01jzmydnta39vrawdka93w5zye, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjJiZjNlODMtY2RjYmNmNTktODdiMDIzMWItMTcxMDllZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. terminate execution. 2025-07-08T11:58:29.233907Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7524679233646984106:2446] TxId: 281474976715671. Ctx: { TraceId: 01jzmydnta39vrawdka93w5zye, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjJiZjNlODMtY2RjYmNmNTktODdiMDIzMWItMTcxMDllZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Resource usage for last stat interval: ComputeTime: 0.004048s ReadRows: 1 ReadBytes: 20 ru: 2 rate limiter was not found force flag: 1 2025-07-08T11:58:29.234184Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751975909271, txId: 281474976715670] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanScriptingRangeFullScan+SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 13769, MsgBus: 10206 2025-07-08T11:58:27.770194Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679226760257427:2072];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:27.771947Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000b71/r3tmp/tmppBqUvS/pdisk_1.dat 2025-07-08T11:58:27.940918Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13769, node 1 2025-07-08T11:58:27.962070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:27.962094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:27.964447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:58:28.036969Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:58:28.036985Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:58:28.036987Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:58:28.037039Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10206 TClient is connected to server localhost:10206 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:58:28.202783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:28.213288Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:58:28.262095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:28.341669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T11:58:28.386027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:28.408121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:28.541666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:58:28.551973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:58:28.563734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T11:58:28.572802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T11:58:28.639538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T11:58:28.653773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T11:58:28.664402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T11:58:28.757644Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:58:28.867526Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7524679231055227218:2451] TxId: 281474976715671. Ctx: { TraceId: 01jzmydnez4qvwpj66snq49z37, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTA3NzA5ZWItY2I1YTk2MGItMTQ4NTA4MzMtOTBlOTYxMGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Can not find default state storage group for database /Root 2025-07-08T11:58:28.873930Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751975908914, txId: 281474976715670] shutting down |63.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |63.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |63.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_BTreeIndex |63.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest |63.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/cms/ut/ydb-core-cms-ut >> KqpCost::IndexLookupJoin-StreamLookupJoin [GOOD] >> Describe::Statistics [GOOD] >> Describe::Location |63.7%| [LD] {RESULT} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |63.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestPassesUpdatedPropsToSession [GOOD] Test command err: 2025-07-08T11:58:20.629593Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:20.629630Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:20.637367Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:20.637414Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:20.658528Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:20.659688Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:131:2157], cookie=7309590734762522728, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-07-08T11:58:20.659804Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-07-08T11:58:20.685283Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:131:2157], cookie=7309590734762522728) 2025-07-08T11:58:20.685478Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:141:2165], cookie=208013318868533573, path="/Root/Res", config={ }) 2025-07-08T11:58:20.685539Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-07-08T11:58:20.696427Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:141:2165], cookie=208013318868533573) 2025-07-08T11:58:20.696906Z node 1 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [1:146:2170]. Cookie: 9997976250361325514. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-07-08T11:58:20.696919Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[1:146:2170], cookie=9997976250361325514) 2025-07-08T11:58:20.697015Z node 1 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [1:146:2170]. 
Cookie: 2845759085941680309. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 28000 } } 2025-07-08T11:58:20.697024Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[1:146:2170], cookie=2845759085941680309) 2025-07-08T11:58:23.210919Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:23.210952Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:23.215346Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:23.215399Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:23.249251Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:23.249422Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:133:2159], cookie=9033664247398553864, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-07-08T11:58:23.249507Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-07-08T11:58:23.261734Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:133:2159], cookie=9033664247398553864) 2025-07-08T11:58:23.262004Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:143:2167]. Cookie: 1172331655494301136. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-07-08T11:58:23.262019Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[2:143:2167], cookie=1172331655494301136) 2025-07-08T11:58:23.262131Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:143:2167]. Cookie: 15045089582632354208. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-07-08T11:58:23.262139Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[2:143:2167], cookie=15045089582632354208) 2025-07-08T11:58:23.262209Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [2:143:2167]. Cookie: 9332872379622335358. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2025-07-08T11:58:23.262217Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[2:143:2167], cookie=9332872379622335358) 2025-07-08T11:58:23.262266Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [2:143:2167]. Cookie: 2014196415855651010. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2025-07-08T11:58:23.262271Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[2:143:2167], cookie=2014196415855651010) 2025-07-08T11:58:25.362169Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:25.362203Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:25.366619Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:25.366653Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:25.378269Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:25.378398Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:133:2159], cookie=17954166893599802583, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-07-08T11:58:25.378474Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-07-08T11:58:25.399884Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:133:2159], cookie=17954166893599802583) 2025-07-08T11:58:25.400032Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:143:2167], cookie=6941836064672841751, path="/Root/Res1", config={ }) 2025-07-08T11:58:25.400075Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res1" 2025-07-08T11:58:25.414320Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:143:2167], cookie=6941836064672841751) 2025-07-08T11:58:25.414475Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:148:2172], cookie=16855273502507592263, path="/Root/Res2", config={ }) 2025-07-08T11:58:25.414530Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root/Res2" 2025-07-08T11:58:25.425522Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:148:2172], cookie=16855273502507592263) 2025-07-08T11:58:25.425749Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:153:2177]. Cookie: 16310693684655233600. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-07-08T11:58:25.425759Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[3:153:2177], cookie=16310693684655233600) 2025-07-08T11:58:25.425831Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:153:2177]. Cookie: 6563214523292761903. 
Data: { Results { ResourceId: 3 Error { Status: SUCCESS } EffectiveProps { ResourceId: 3 ResourcePath: "Root/Res2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-07-08T11:58:25.425836Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[3:153:2177], cookie=6563214523292761903) 2025-07-08T11:58:25.425919Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [3:153:2177]. Cookie: 12884883264928786142. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1020500 } ResourcesInfo { ResourceId: 3 AcceptedUs: 1020500 } } 2025-07-08T11:58:25.425925Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[3:153:2177], cookie=12884883264928786142) 2025-07-08T11:58:27.736102Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:27.736143Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:27.740054Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:27.740093Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:27.768007Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:27.768259Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:133:2159], cookie=17140905057479991151, path="/Root", config={ MaxUnitsPerSecond: 100 PrefetchCoefficient: 300 }) 2025-07-08T11:58:27.768409Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-07-08T11:58:27.781237Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:133:2159], cookie=17140905057479991151) 2025-07-08T11:58:27.781555Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:143:2167]. Cookie: 2533174481383306847. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 300 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { Enabled: true BillingPeriodSec: 2 Labels { key: "k1" value: "v1" } Labels { key: "k2" value: "v2" } } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-07-08T11:58:27.781565Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:143:2167], cookie=2533174481383306847) 2025-07-08T11:58:27.781619Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [4:143:2167]. Cookie: 11690925274705826658. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 27500 } } 2025-07-08T11:58:27.781624Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[4:143:2167], cookie=11690925274705826658) 2025-07-08T11:58:29.834945Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:29.834981Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:29.849362Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:29.849489Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:29.872794Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:29.872970Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:133:2159], cookie=1460392736410619997, path="/Root", config={ MaxUnitsPerSecond: 100 }) 2025-07-08T11:58:29.873032Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-07-08T11:58:29.883909Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:133:2159], cookie=1460392736410619997) 2025-07-08T11:58:29.884070Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:142:2166], cookie=8775600328228785173, path="/Root/Res", config={ }) 2025-07-08T11:58:29.884132Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-07-08T11:58:29.896982Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:142:2166], cookie=8775600328228785173) 2025-07-08T11:58:29.897254Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:147:2171]. Cookie: 14732465657670666640. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-07-08T11:58:29.897264Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:147:2171], cookie=14732465657670666640) 2025-07-08T11:58:29.897348Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceUpdate::Execute (sender=[5:151:2175], cookie=3094301943104838829, id=0, path="/Root", config={ MaxUnitsPerSecond: 150 }) 2025-07-08T11:58:29.897407Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Updated quoter resource 1 "Root" 2025-07-08T11:58:29.897440Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:147:2171]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 150 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } StateNotification { Status: SUCCESS } } } 2025-07-08T11:58:29.910345Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceUpdate::Complete (sender=[5:151:2175], cookie=3094301943104838829) 2025-07-08T11:58:29.910523Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:147:2171]. Cookie: 3889314537732673408. Data: { } 2025-07-08T11:58:29.910531Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Update quoter resources consumption state (sender=[5:147:2171], cookie=3889314537732673408) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest >> KqpCost::QuerySeviceRangeFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 61866, MsgBus: 6102 2025-07-08T11:58:28.790903Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679228285296142:2145];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:28.791029Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000b26/r3tmp/tmpRMvdTb/pdisk_1.dat 2025-07-08T11:58:28.844271Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61866, node 1 2025-07-08T11:58:28.863841Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:58:28.863853Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:58:28.863854Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:58:28.863893Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6102 2025-07-08T11:58:28.890281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:28.890328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:28.891222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6102 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:58:28.969759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:28.976689Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:58:28.986133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:29.011992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T11:58:29.073893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T11:58:29.094752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:58:29.219823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:58:29.229304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:58:29.242149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T11:58:29.255211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T11:58:29.269525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T11:58:29.282660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T11:58:29.295114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 >> TReplicationTests::CopyReplicatedTable [GOOD] >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD] |63.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTestWithResets::UpdateAck ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupJoin-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 16586, MsgBus: 4919 2025-07-08T11:58:28.757381Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679231938640955:2061];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:28.757412Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000b2d/r3tmp/tmp3NHXgm/pdisk_1.dat 2025-07-08T11:58:28.842500Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16586, node 1 2025-07-08T11:58:28.859438Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:58:28.859451Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:58:28.859453Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:58:28.859497Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4919 TClient is connected to server localhost:4919 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-07-08T11:58:28.918568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:28.918595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:28.922174Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:58:28.937699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:28.943232Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-07-08T11:58:28.952516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T11:58:29.023276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-07-08T11:58:29.046487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:29.061310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:58:29.212453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T11:58:29.230176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-07-08T11:58:29.294334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-07-08T11:58:29.307589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-07-08T11:58:29.328672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-07-08T11:58:29.349365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-07-08T11:58:29.367763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-07-08T11:58:29.643451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-07-08T11:58:29.659405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-07-08T11:58:29.690250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-07-08T11:58:29.762273Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; /Root/Join1_2 1 19 /Root/Join1_1 8 136 |63.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTestWithResets::UpdateAck [GOOD] >> TPopulatorTest::Boot >> test.py::test[count-count_all_grouped--ForceBlocks] [GOOD] >> test.py::test[count-count_all_grouped--Results] >> TPopulatorTest::Boot [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> TCdcStreamTests::DropTableWithIndexWithStream [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:56:19.492369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:56:19.492398Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:56:19.492404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:56:19.492409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:56:19.492419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:56:19.492423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:56:19.492432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:56:19.492445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:56:19.492524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:56:19.503303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:56:19.503319Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:56:19.506903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:56:19.506959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:56:19.506984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:56:19.508395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:56:19.508442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:56:19.508535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:56:19.508706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:56:19.509564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:56:19.509599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:56:19.509817Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:56:19.509827Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:56:19.509843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:56:19.509860Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:56:19.509865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:56:19.509889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:56:19.511402Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:56:19.525096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:56:19.525156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:56:19.525200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:56:19.525242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:56:19.525251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:56:19.525947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:56:19.525976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:56:19.526027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:56:19.526042Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:56:19.526047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:56:19.526052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:56:19.526993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:56:19.527006Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:56:19.527011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:56:19.527363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:56:19.527372Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:56:19.527377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:56:19.527383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:56:19.527951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:56:19.528324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:56:19.528372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, 
TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:56:19.528549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:56:19.528571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:56:19.528581Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:56:19.528645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:56:19.528651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:56:19.528678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:56:19.528689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:56:19.529055Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:56:19.529063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:56:19.529098Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:56:19.529103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:56:19.529112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:56:19.529118Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:56:19.529127Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:56:19.529131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:56:19.529136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:56:19.529139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:56:19.529144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:56:19.529149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:56:19.529153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:56:19.529157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:56:19.529167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:56:19.529172Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:56:19.529176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:56:19.529534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:56:19.529549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... rsion: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-07-08T11:58:29.662620Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-07-08T11:58:29.662624Z node 19 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-07-08T11:58:29.663010Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-07-08T11:58:29.663024Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-07-08T11:58:29.663028Z node 19 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-07-08T11:58:29.663114Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:2, at schemeshard: 72057594046678944 2025-07-08T11:58:29.663120Z node 19 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:2 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:58:29.663182Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 5 2025-07-08T11:58:29.663217Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:2 progress is 4/5 2025-07-08T11:58:29.663221Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/5 2025-07-08T11:58:29.663227Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:2 progress is 4/5 2025-07-08T11:58:29.663230Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/5 2025-07-08T11:58:29.663235Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/5, is published: false 2025-07-08T11:58:29.663416Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-07-08T11:58:29.663426Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-07-08T11:58:29.663430Z node 19 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 
103 2025-07-08T11:58:29.663434Z node 19 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-07-08T11:58:29.663438Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-07-08T11:58:29.663450Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/5, is published: true 2025-07-08T11:58:29.663491Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-07-08T11:58:29.663496Z node 19 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:58:29.663524Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-07-08T11:58:29.663540Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 5/5 2025-07-08T11:58:29.663544Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2025-07-08T11:58:29.663549Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 5/5 2025-07-08T11:58:29.663552Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2025-07-08T11:58:29.663556Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 5/5, is published: true 2025-07-08T11:58:29.663566Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [19:377:2345] message: TxId: 103 2025-07-08T11:58:29.663571Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2025-07-08T11:58:29.663577Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-07-08T11:58:29.663580Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-07-08T11:58:29.663601Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-07-08T11:58:29.663605Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1 2025-07-08T11:58:29.663608Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2025-07-08T11:58:29.663613Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T11:58:29.663617Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:2 2025-07-08T11:58:29.663620Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:2 2025-07-08T11:58:29.663626Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-07-08T11:58:29.663633Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:3 2025-07-08T11:58:29.663636Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:3 2025-07-08T11:58:29.663641Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-07-08T11:58:29.663645Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, 
operation id: 103:4 2025-07-08T11:58:29.663648Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:4 2025-07-08T11:58:29.663657Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2025-07-08T11:58:29.663713Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T11:58:29.663737Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2025-07-08T11:58:29.663748Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-07-08T11:58:29.663753Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-07-08T11:58:29.663758Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-07-08T11:58:29.664177Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-07-08T11:58:29.664193Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-07-08T11:58:29.664205Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-07-08T11:58:29.664210Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-07-08T11:58:29.664215Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-07-08T11:58:29.664653Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-07-08T11:58:29.664697Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-07-08T11:58:29.664703Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [19:757:2660] 2025-07-08T11:58:29.664737Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2025-07-08T11:58:29.664829Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-07-08T11:58:29.664864Z node 19 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 45us result status StatusPathDoesNotExist 2025-07-08T11:58:29.664900Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 4]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: 
"/MyRoot/Table/Index/indexImplTable/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-07-08T11:58:29.664973Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-07-08T11:58:29.664988Z node 19 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" took 17us result status StatusPathDoesNotExist 2025-07-08T11:58:29.665003Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 4]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |63.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:58:25.972364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:58:25.972390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:25.972394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:58:25.972399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:58:25.972412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2025-07-08T11:58:25.972416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:58:25.972429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:25.972443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:58:25.972522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:58:25.995717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:58:25.995741Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:58:26.001231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:58:26.001296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:58:26.001330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:58:26.002755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:58:26.002802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:58:26.002909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:26.003087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:58:26.003792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:26.003828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:58:26.004053Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:26.004064Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:26.004079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:58:26.004085Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:26.004090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:58:26.004115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:58:26.005394Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:58:26.025514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:58:26.025596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:26.025660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason 
transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:58:26.025700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:58:26.025710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:26.026563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:26.026590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:58:26.026639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:26.026648Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:58:26.026653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:58:26.026658Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:58:26.027059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:26.027070Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:58:26.027074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:58:26.027681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:26.027693Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:26.027699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:26.027706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:58:26.028270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:58:26.028792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:58:26.028833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:58:26.029033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:26.029060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 
Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:58:26.029067Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:26.029146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:58:26.029153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:26.029183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:58:26.029196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:58:26.029615Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:26.029624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:26.029671Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:26.029678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:58:26.029691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:26.029697Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:58:26.029709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:26.029713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:26.029719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:26.029722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:26.029726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:58:26.029732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:26.029737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:58:26.029742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:58:26.029754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:58:26.029760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:58:26.029765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:58:26.030174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:58:26.030192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 25:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-07-08T11:58:30.491087Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation FAKE_COORDINATOR: Erasing txId 102 2025-07-08T11:58:30.491112Z node 9 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:30.491121Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:30.491172Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-07-08T11:58:30.491203Z node 9 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:30.491209Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [9:207:2209], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-07-08T11:58:30.491214Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [9:207:2209], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-07-08T11:58:30.491226Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-07-08T11:58:30.491235Z node 9 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-07-08T11:58:30.491250Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-07-08T11:58:30.491256Z node 9 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-07-08T11:58:30.491261Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-07-08T11:58:30.491267Z node 9 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-07-08T11:58:30.491270Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-07-08T11:58:30.491275Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-07-08T11:58:30.491282Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-07-08T11:58:30.491288Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-07-08T11:58:30.491292Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-07-08T11:58:30.491327Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-07-08T11:58:30.491334Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-07-08T11:58:30.491338Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-07-08T11:58:30.491343Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-07-08T11:58:30.491653Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [9:207:2209], Recipient [9:125:2151]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 
Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 } 2025-07-08T11:58:30.491661Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-07-08T11:58:30.491682Z node 9 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T11:58:30.491693Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T11:58:30.491698Z node 9 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-07-08T11:58:30.491704Z node 9 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-07-08T11:58:30.491710Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:58:30.491725Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-07-08T11:58:30.491942Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [9:207:2209], Recipient [9:125:2151]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 } 2025-07-08T11:58:30.491949Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-07-08T11:58:30.491958Z node 9 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T11:58:30.491967Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T11:58:30.491971Z node 9 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-07-08T11:58:30.491975Z node 9 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-07-08T11:58:30.491979Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-07-08T11:58:30.491991Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-07-08T11:58:30.491995Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-07-08T11:58:30.497062Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435082, Sender [9:125:2151], Recipient [9:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2025-07-08T11:58:30.497087Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2025-07-08T11:58:30.497110Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 
72057594046678944 2025-07-08T11:58:30.497121Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-07-08T11:58:30.497150Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:58:30.501276Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-07-08T11:58:30.509274Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-07-08T11:58:30.509300Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-07-08T11:58:30.509596Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-07-08T11:58:30.509602Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-07-08T11:58:30.509619Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-07-08T11:58:30.509685Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-07-08T11:58:30.509696Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-07-08T11:58:30.509767Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [9:449:2404], Recipient [9:125:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T11:58:30.509774Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T11:58:30.509778Z node 9 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-07-08T11:58:30.509817Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [9:365:2344], Recipient [9:125:2151]: NKikimrScheme.TEvNotifyTxCompletion TxId: 102 2025-07-08T11:58:30.509822Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-07-08T11:58:30.509837Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-07-08T11:58:30.509862Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-07-08T11:58:30.509867Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [9:447:2402] 2025-07-08T11:58:30.509891Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [9:449:2404], Recipient [9:125:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-07-08T11:58:30.509896Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-07-08T11:58:30.509899Z node 9 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-07-08T11:58:30.509956Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [9:450:2405], Recipient [9:125:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false 
ReturnBoundaries: false ShowPrivateTable: false } 2025-07-08T11:58:30.509961Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-07-08T11:58:30.509973Z node 9 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:58:30.510009Z node 9 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Replication" took 35us result status StatusPathDoesNotExist 2025-07-08T11:58:30.510041Z node 9 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Replication\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/Replication" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::CopyReplicatedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:58:26.575773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:58:26.575798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:26.575803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:58:26.575808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:58:26.575817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:58:26.575820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:58:26.575911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:26.575923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# 
false 2025-07-08T11:58:26.576174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:58:26.607485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:58:26.607509Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:58:26.633615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:58:26.633680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:58:26.633705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:58:26.646581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:58:26.646634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:58:26.646813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:26.647032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:58:26.649069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:26.649106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:58:26.649306Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:26.649314Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:26.649328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:58:26.649335Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:26.649340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:58:26.649360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:58:26.650665Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:58:26.733350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:58:26.733424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:26.733482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:58:26.733517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:58:26.733526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:26.741368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: 
StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:26.741402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:58:26.741455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:26.741467Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:58:26.741473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:58:26.741478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:58:26.752224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:26.752252Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:58:26.752260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:58:26.761290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:26.761319Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:26.761327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:26.761335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:58:26.761997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:58:26.769189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:58:26.769248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:58:26.769439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:26.769472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:58:26.769481Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:26.769559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:58:26.769567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-07-08T11:58:26.769597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:58:26.769609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:58:26.774979Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:26.774994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:26.775045Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:26.775051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:58:26.775061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:26.775068Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:58:26.775082Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:26.775088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:26.775093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:26.775095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:26.775099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:58:26.775105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:26.775109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:58:26.775113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:58:26.775130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:58:26.775136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:58:26.775140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:58:26.775577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:58:26.775591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
MESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-07-08T11:58:30.605432Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-07-08T11:58:30.606762Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-07-08T11:58:30.606780Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-07-08T11:58:30.607200Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 4 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 246 } } 2025-07-08T11:58:30.607208Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-07-08T11:58:30.607223Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 4 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 246 } } 2025-07-08T11:58:30.607234Z node 8 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 4 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 246 } } 2025-07-08T11:58:30.607326Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 410 RawX2: 34359740747 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-07-08T11:58:30.607331Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-07-08T11:58:30.607341Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 410 RawX2: 34359740747 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-07-08T11:58:30.607346Z node 8 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-07-08T11:58:30.607352Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 410 RawX2: 34359740747 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-07-08T11:58:30.607362Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:30.607365Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 
HandleReply TEvDataShard::TEvSchemaChanged CollectSchemaChanged: false 2025-07-08T11:58:30.608381Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-07-08T11:58:30.608416Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-07-08T11:58:30.621807Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 311 RawX2: 34359740666 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-07-08T11:58:30.621830Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-07-08T11:58:30.621854Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 311 RawX2: 34359740666 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-07-08T11:58:30.621861Z node 8 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-07-08T11:58:30.621871Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 311 RawX2: 34359740666 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-07-08T11:58:30.621886Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:30.621891Z node 8 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-07-08T11:58:30.621895Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-07-08T11:58:30.621900Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-07-08T11:58:30.621905Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-07-08T11:58:30.622361Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-07-08T11:58:30.622441Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-07-08T11:58:30.622449Z node 8 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2025-07-08T11:58:30.622458Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-07-08T11:58:30.622463Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2025-07-08T11:58:30.622474Z node 8 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-07-08T11:58:30.622478Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 
102:0 240 -> 240 2025-07-08T11:58:30.622848Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-07-08T11:58:30.622859Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-07-08T11:58:30.622871Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-07-08T11:58:30.622875Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-07-08T11:58:30.622882Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-07-08T11:58:30.622885Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-07-08T11:58:30.622890Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-07-08T11:58:30.622902Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [8:337:2316] message: TxId: 102 2025-07-08T11:58:30.622909Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-07-08T11:58:30.622915Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-07-08T11:58:30.622919Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-07-08T11:58:30.622952Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-07-08T11:58:30.622956Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-07-08T11:58:30.623346Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-07-08T11:58:30.623358Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:440:2401] TestWaitNotification: OK eventTxId 102 2025-07-08T11:58:30.623457Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/CopyTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:58:30.623508Z node 8 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/CopyTable" took 56us result status StatusSuccess 2025-07-08T11:58:30.623610Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/CopyTable" PathDescription { Self { Name: "CopyTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "CopyTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 
PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTestWithResets::UpdateAck [GOOD] Test command err: 2025-07-08T11:58:30.906097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:58:30.906120Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 100 2025-07-08T11:58:30.927671Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:71:2110], cookie# 100, event size# 330, preserialized size# 51 2025-07-08T11:58:30.927711Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:96:2122] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 3 2025-07-08T11:58:30.927960Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2122], cookie# 100 2025-07-08T11:58:30.927974Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2122], cookie# 100 2025-07-08T11:58:30.927982Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:99:2125] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 
72057594046678944 Generation: 2 }: sender# [1:96:2122], cookie# 100 2025-07-08T11:58:30.928042Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirC" PathDescription { Self { Name: "DirC" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: false CreateTxId: 100 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:71:2110], cookie# 100, event size# 220, preserialized size# 2 2025-07-08T11:58:30.928048Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:96:2122] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 2 2025-07-08T11:58:30.928063Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2122], cookie# 100 2025-07-08T11:58:30.928069Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2122], cookie# 100 2025-07-08T11:58:30.928075Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:99:2125] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2122], cookie# 100 FAKE_COORDINATOR: Add transaction: 100 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000001 FAKE_COORDINATOR: Erasing txId 100 2025-07-08T11:58:30.928792Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 
PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:71:2110], cookie# 100, event size# 340, preserialized size# 56 2025-07-08T11:58:30.928802Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:96:2122] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 4 2025-07-08T11:58:30.928900Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirC" PathDescription { Self { Name: "DirC" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:71:2110], cookie# 100, event size# 225, preserialized size# 2 2025-07-08T11:58:30.928906Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:96:2122] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 3 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-07-08T11:58:30.953133Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:12:2059] 2025-07-08T11:58:30.953158Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:97:2123] Successful handshake: replica# [1:12:2059] 2025-07-08T11:58:30.953169Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:97:2123] Resume sync: replica# [1:12:2059], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-07-08T11:58:30.953180Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:15:2062] 2025-07-08T11:58:30.953184Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:98:2124] Successful handshake: replica# [1:15:2062] 2025-07-08T11:58:30.953188Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:98:2124] Resume sync: replica# [1:15:2062], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-07-08T11:58:30.953194Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:99:2125] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:18:2065] 2025-07-08T11:58:30.953198Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:99:2125] Successful handshake: replica# [1:18:2065] 2025-07-08T11:58:30.953201Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:99:2125] Resume sync: replica# [1:18:2065], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-07-08T11:58:30.953218Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle 
NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:24339059:0] }: sender# [1:97:2123] 2025-07-08T11:58:30.953238Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:96:2122] 2025-07-08T11:58:30.953266Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:97:2123] 2025-07-08T11:58:30.953275Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2122], cookie# 0 2025-07-08T11:58:30.953286Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:97:2123] 2025-07-08T11:58:30.953295Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:12:2059], cookie# 0 2025-07-08T11:58:30.953306Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:1099535966835:0] }: sender# [1:98:2124] 2025-07-08T11:58:30.953312Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2122], cookie# 0 2025-07-08T11:58:30.953318Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:96:2122] 2025-07-08T11:58:30.953331Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:98:2124] 2025-07-08T11:58:30.953337Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:12:2059], cookie# 0 2025-07-08T11:58:30.953343Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2122], cookie# 0 2025-07-08T11:58:30.953353Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:98:2124] 2025-07-08T11:58:30.953361Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:15:2062], cookie# 0 2025-07-08T11:58:30.953369Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:2199047594611:0] }: sender# [1:99:2125] 2025-07-08T11:58:30.953374Z node 1 :SCHEME_BOARD_POPULATOR 
DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2122], cookie# 0 2025-07-08T11:58:30.953381Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:99:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:96:2122] 2025-07-08T11:58:30.953392Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:15:2062], cookie# 0 2025-07-08T11:58:30.953408Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:99:2125] 2025-07-08T11:58:30.953415Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:99:2125] 2025-07-08T11:58:30.953421Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:99:2125] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2122], cookie# 0 2025-07-08T11:58:30.953428Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:24339059:0] }: sender# [1:97:2123] 2025-07-08T11:58:30.953434Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:96:2122] 2025-07-08T11:58:30.953439Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:99:2125] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2122], cookie# 0 2025-07-08T11:58:30.953454Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:97:2123], cookie# 0 2025-07-08T11:58:30.953459Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Ack for unknown update (already acked?): sender# [1:97:2123], cookie# 0 2025-07-08T11:58:30.953467Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:12:2059] 2025-07-08T11:58:30.953473Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:99:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:18:2065], cookie# 0 2025-07-08T11:58:30.953480Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:97:2123], cookie# 100 2025-07-08T11:58:30.953487Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:99:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:18:2065], cookie# 0 2025-07-08T11:58:30.953495Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: 
[1:1099535966835:0] }: sender# [1:98:2124] 2025-07-08T11:58:30.953501Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:96:2122] 2025-07-08T11:58:30.953508Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:97:2123], cookie# 0 2025-07-08T11:58:30.953512Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Ack for unknown update (already acked?): sender# [1:97:2123], cookie# 0 2025-07-08T11:58:30.953517Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:15:2062] 2025-07-08T11:58:30.953522Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:97:2123], cookie# 100 2025-07-08T11:58:30.953528Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:98:2124], cookie# 0 2025-07-08T11:58:30.953531Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 0 2025-07-08T11:58:30.953536Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:98:2124], cookie# 100 2025-07-08T11:58:30.953542Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:96:2122] Ack update: ack to# [1:71:2110], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 3 2025-07-08T11:58:30.953546Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:96:2122] Ack update: ack to# [1:71:2110], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 4 2025-07-08T11:58:30.953719Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:2199047594611:0] }: sender# [1:99:2125] 2025-07-08T11:58:30.953730Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:99:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:96:2122] 2025-07-08T11:58:30.953771Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:98:2124], cookie# 0 2025-07-08T11:58:30.953776Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 0 2025-07-08T11:58:30.953781Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:99:2125] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:18:2065] 2025-07-08T11:58:30.953797Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:98:2124], cookie# 100 2025-07-08T11:58:30.953803Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: 
[1:96:2122] Ack update: ack to# [1:71:2110], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 2 2025-07-08T11:58:30.953809Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:96:2122] Ack update: ack to# [1:71:2110], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 3 2025-07-08T11:58:30.953845Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:99:2125], cookie# 0 2025-07-08T11:58:30.953849Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Ack for unknown update (already acked?): sender# [1:99:2125], cookie# 0 2025-07-08T11:58:30.953860Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:99:2125], cookie# 100 2025-07-08T11:58:30.953864Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Ack for unknown update (already acked?): sender# [1:99:2125], cookie# 100 2025-07-08T11:58:30.953937Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:99:2125], cookie# 0 2025-07-08T11:58:30.953943Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Ack for unknown update (already acked?): sender# [1:99:2125], cookie# 0 2025-07-08T11:58:30.953964Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:99:2125], cookie# 100 2025-07-08T11:58:30.953967Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Ack for unknown update (already acked?): sender# [1:99:2125], cookie# 100 TestWaitNotification: OK eventTxId 100 >> KqpCost::OlapPointLookup [GOOD] >> test.py::test[join-bush_dis_in_in--Results] [GOOD] >> test.py::test[join-do_not_suppres_equijoin_input_sorts--ForceBlocks] [SKIPPED] >> TPartBtreeIndexIteration::FewNodes_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_Slices >> test.py::test[join-do_not_suppres_equijoin_input_sorts--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::Boot [GOOD] Test command err: 2025-07-08T11:58:31.378325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:58:31.378352Z node 1 :IMPORT WARN: Table profiles were not loaded >> test.py::test[join-do_not_suppres_equijoin_input_sorts--Results] [SKIPPED] >> test.py::test[join-full_equal_null-off-ForceBlocks] |63.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapPointLookup [GOOD] Test command err: Trying to start YDB, gRPC: 14818, MsgBus: 19231 2025-07-08T11:58:30.003727Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679237840390044:2236];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000ad9/r3tmp/tmppkhfKX/pdisk_1.dat 2025-07-08T11:58:30.106684Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T11:58:30.201736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:30.201763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:30.202573Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:58:30.209893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14818, node 1 2025-07-08T11:58:30.249093Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:58:30.249104Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:58:30.249106Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:58:30.249150Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19231 TClient is connected to server localhost:19231 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:58:30.435916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:30.445642Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-07-08T11:58:30.498057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:30.536252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:30.577119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:30.679883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:58:30.821783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T11:58:30.837026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-07-08T11:58:30.859194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-07-08T11:58:30.918569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-07-08T11:58:30.985644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-07-08T11:58:30.998190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-07-08T11:58:31.003695Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:58:31.056248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-07-08T11:58:31.328328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-07-08T11:58:31.368036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7524679242135359809:2457];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:58:31.368089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7524679242135359809:2457];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:31.368143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7524679242135359809:2457];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:31.368162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7524679242135359809:2457];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:31.368180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7524679242135359809:2457];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:31.368199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7524679242135359809:2457];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 
2025-07-08T11:58:31.368219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7524679242135359809:2457];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:31.368238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7524679242135359809:2457];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:31.368256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7524679242135359809:2457];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:58:31.368273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7524679242135359809:2457];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:31.368298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7524679242135359809:2457];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:58:31.368317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7524679242135359809:2457];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:58:31.370233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7524679242135359805:2453];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:58:31.370290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7524679242135359805:2453];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:31.370333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7524679242135359805:2453];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:31.370352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7524679242135359805:2453];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:31.370370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7524679242135359805:2453];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:31.370390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7524679242135359805:2453];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:58:31.370410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7524679242135359805:2453];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:31.370429Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037925;self_id=[1:7524679242135359805:2453];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:31.370446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7524679242135359805:2453];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:58:31.370465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;self_id=[1:7524679242135359805:2453];tablet_id=72075186224037925;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:31.370484Z node 1 :TX_COLUMNSHARD WARN: tablet_ ... 72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:58:31.409598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:58:31.409602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:58:31.409608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:58:31.409612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:58:31.409629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:58:31.409635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:58:31.409651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:58:31.409657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:58:31.409668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:58:31.409672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:58:31.409678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:58:31.409684Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:58:31.409689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:58:31.409722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:58:31.409727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:58:31.409737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:58:31.409741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-07-08T11:58:31.410494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037931;self_id=[1:7524679242135359807:2455];ev=NActors::IEventHandle;tablet_id=72075186224037931;tx_id=281474976710670;this=93901802710560;method=TTxController::StartProposeOnExecute;tx_info=281474976710670:TX_KIND_SCHEMA;min=1751975911410;max=18446744073709551615;plan=0;src=[1:7524679237840390250:2204];cookie=442:12;;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-07-08T11:58:31.410607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7524679242135359831:2461];ev=NActors::IEventHandle;tablet_id=72075186224037922;tx_id=281474976710670;this=93901802705760;method=TTxController::StartProposeOnExecute;tx_info=281474976710670:TX_KIND_SCHEMA;min=1751975911410;max=18446744073709551615;plan=0;src=[1:7524679237840390250:2204];cookie=352:12;;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-07-08T11:58:31.410783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7524679242135359819:2460];ev=NActors::IEventHandle;tablet_id=72075186224037924;tx_id=281474976710670;this=93901802726240;method=TTxController::StartProposeOnExecute;tx_info=281474976710670:TX_KIND_SCHEMA;min=1751975911410;max=18446744073709551615;plan=0;src=[1:7524679237840390250:2204];cookie=372:12;;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-07-08T11:58:31.411227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[1:7524679242135359809:2457];ev=NActors::IEventHandle;tablet_id=72075186224037930;tx_id=281474976710670;this=93901811087904;method=TTxController::StartProposeOnExecute;tx_info=281474976710670:TX_KIND_SCHEMA;min=1751975911411;max=18446744073709551615;plan=0;src=[1:7524679237840390250:2204];cookie=432:12;;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-07-08T11:58:31.411725Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037928;self_id=[1:7524679242135359818:2459];ev=NActors::IEventHandle;tablet_id=72075186224037928;tx_id=281474976710670;this=93901802710400;method=TTxController::StartProposeOnExecute;tx_info=281474976710670:TX_KIND_SCHEMA;min=1751975911411;max=18446744073709551615;plan=0;src=[1:7524679237840390250:2204];cookie=412:12;;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-07-08T11:58:31.417049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710670;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-07-08T11:58:31.417502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710670;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-07-08T11:58:31.422238Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710670;fline=tx_controller.cpp:215;event=finished_tx;tx_id=281474976710670; 2025-07-08T11:58:31.424399Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710670;fline=tx_controller.cpp:215;event=finished_tx;tx_id=281474976710670; 2025-07-08T11:58:31.424575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710670;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-07-08T11:58:31.426287Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710670;fline=tx_controller.cpp:215;event=finished_tx;tx_id=281474976710670; 2025-07-08T11:58:31.426407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710670;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-07-08T11:58:31.427737Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710670;fline=tx_controller.cpp:215;event=finished_tx;tx_id=281474976710670; 2025-07-08T11:58:31.428049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710670;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-07-08T11:58:31.429657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710670;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-07-08T11:58:31.430025Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710670;fline=tx_controller.cpp:215;event=finished_tx;tx_id=281474976710670; 2025-07-08T11:58:31.430184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710670;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-07-08T11:58:31.431907Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710670;fline=tx_controller.cpp:215;event=finished_tx;tx_id=281474976710670; 2025-07-08T11:58:31.432003Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710670;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-07-08T11:58:31.433249Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710670;fline=tx_controller.cpp:215;event=finished_tx;tx_id=281474976710670; 2025-07-08T11:58:31.433433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710670;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-07-08T11:58:31.434812Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710670;fline=tx_controller.cpp:215;event=finished_tx;tx_id=281474976710670; 2025-07-08T11:58:31.434891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710670;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=13;result=not_found; 2025-07-08T11:58:31.436209Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710670;fline=tx_controller.cpp:215;event=finished_tx;tx_id=281474976710670; 2025-07-08T11:58:31.439766Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710670;fline=tx_controller.cpp:215;event=finished_tx;tx_id=281474976710670; 2025-07-08T11:58:31.483187Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=281474976710672;tx_id=281474976710672;fline=tx_controller.cpp:215;event=finished_tx;tx_id=281474976710672; 2025-07-08T11:58:31.483519Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037931;self_id=[1:7524679242135359807:2455];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037931;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037930;receive=72075186224037926; 2025-07-08T11:58:31.483664Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=281474976710672;tx_id=281474976710672;fline=tx_controller.cpp:215;event=finished_tx;tx_id=281474976710672; 2025-07-08T11:58:31.483679Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=281474976710672;tx_id=281474976710672;fline=tx_controller.cpp:215;event=finished_tx;tx_id=281474976710672; 2 |63.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |63.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |63.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:127:2153] sender: [1:128:2058] recipient: [1:110:2142] 2025-07-08T11:58:15.384327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 
600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:58:15.384350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:15.384357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:58:15.384361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:58:15.384374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:58:15.384378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:58:15.384386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:15.384400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:58:15.384469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:58:15.397182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:58:15.397201Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:58:15.399280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:58:15.399335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:58:15.399359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:58:15.401673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:58:15.401727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:58:15.401819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:15.401874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:58:15.402443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:15.402482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:58:15.402693Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:15.402703Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:15.402721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:58:15.402727Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:15.402733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:58:15.402759Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:58:15.403868Z node 1 :HIVE 
INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:127:2153] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T11:58:15.427550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:58:15.427638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:15.427699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:58:15.427763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:58:15.427775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:15.428577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:15.428623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:58:15.428693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:15.428705Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:58:15.428710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:58:15.428715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:58:15.429165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:15.429177Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:58:15.429182Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:58:15.429493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:15.429503Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:15.429508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:15.429515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:58:15.430143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:58:15.430483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg 
operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:58:15.430518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:58:15.430716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:15.430742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:58:15.430751Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:15.430811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:58:15.430818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:15.430842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:58:15.430853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:58:15.431224Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:15.431232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:15.431269Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:15.431274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:58:15.431339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:15.431346Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:58:15.431356Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:15.431361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:15.431365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:15.431368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:15.431372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:58:15.431377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:15.431382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:58:15.431386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 
1:0 2025-07-08T11:58:15.431396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:58:15.431401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:58:15.431405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:58:15.431698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:58:15.431711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 94046678944 2025-07-08T11:58:31.298525Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-07-08T11:58:31.298532Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710763 ready parts: 1/1 2025-07-08T11:58:31.298553Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710763 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:58:31.298620Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-07-08T11:58:31.298629Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-07-08T11:58:31.298632Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-07-08T11:58:31.298636Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2025-07-08T11:58:31.298640Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-07-08T11:58:31.298734Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-07-08T11:58:31.298757Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-07-08T11:58:31.298760Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-07-08T11:58:31.298764Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 11 2025-07-08T11:58:31.298768Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId 
[OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-07-08T11:58:31.298776Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-07-08T11:58:31.299299Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-07-08T11:58:31.299507Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2025-07-08T11:58:31.299514Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-07-08T11:58:31.299519Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710763, at schemeshard: 72057594046678944 2025-07-08T11:58:31.299538Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2025-07-08T11:58:31.299555Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000009 2025-07-08T11:58:31.299612Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:31.299628Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 17179871343 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:58:31.299636Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000009, at schemeshard: 72057594046678944 2025-07-08T11:58:31.299654Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-07-08T11:58:31.299663Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-07-08T11:58:31.299666Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-07-08T11:58:31.299671Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-07-08T11:58:31.299674Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-07-08T11:58:31.299680Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T11:58:31.299688Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-07-08T11:58:31.299692Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2025-07-08T11:58:31.299697Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-07-08T11:58:31.299701Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710763:0 2025-07-08T11:58:31.299704Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710763:0 
2025-07-08T11:58:31.299710Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-07-08T11:58:31.299714Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2025-07-08T11:58:31.299717Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2025-07-08T11:58:31.299720Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-07-08T11:58:31.299824Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-07-08T11:58:31.299839Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-07-08T11:58:31.300268Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:31.300277Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:31.300296Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-07-08T11:58:31.300314Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:31.300318Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:207:2209], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2025-07-08T11:58:31.300321Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:207:2209], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710763 2025-07-08T11:58:31.300419Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-07-08T11:58:31.300431Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-07-08T11:58:31.300435Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2025-07-08T11:58:31.300439Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-07-08T11:58:31.300443Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-07-08T11:58:31.300527Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-07-08T11:58:31.300534Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-07-08T11:58:31.300537Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2025-07-08T11:58:31.300540Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-07-08T11:58:31.300543Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-07-08T11:58:31.300550Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2025-07-08T11:58:31.300554Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:126:2152] 2025-07-08T11:58:31.304266Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-07-08T11:58:31.309217Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-07-08T11:58:31.309244Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-07-08T11:58:31.309257Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2025-07-08T11:58:31.309266Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-07-08T11:58:31.309270Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-07-08T11:58:31.309274Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 103, itemIdx# 4294967295 2025-07-08T11:58:31.312518Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-07-08T11:58:31.312538Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-07-08T11:58:31.312544Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [4:773:2708] TestWaitNotification: OK eventTxId 103 |63.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |63.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |63.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest |63.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TMonitoringTests::InvalidActorId |63.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> TMonitoringTests::InvalidActorId [GOOD] >> TFlatTableLongTxLarge::LargeDeltaChain [GOOD] >> TMonitoringTests::ValidActorId [GOOD] >> test.py::test[window-full/session_aliases--Results] [GOOD] >> test.py::test[window-win_expr_bounds--Results] >> KqpSinkTx::OlapSnapshotROInteractive1 |63.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::InvalidActorId [GOOD] |63.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest |63.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest >> test.py::test[window-win_func_with_struct_access-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_func_with_struct_access-default.txt-Results] >> TCmsTest::RequestReplaceBrokenDevices |63.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... results_accumulator.log} |63.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::ValidActorId [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut_large/unittest >> TFlatTableLongTxLarge::LargeDeltaChain [GOOD] Test command err: DataBytes = 1073746235 DataPages = 150237 FlatIndexBytes = 3155010 BTreeIndexBytes = 6810652 DataBytes = 1073742150 DataPages = 151523 FlatIndexBytes = 22252537 BTreeIndexBytes = 25927778 DataBytes = 1073753117 DataPages = 148879 FlatIndexBytes = 1072403884 BTreeIndexBytes = 1077128646 DataBytes = 1073744451 DataPages = 150676 FlatIndexBytes = 6479123 BTreeIndexBytes = 7437771 DataBytes = 1073743502 DataPages = 47351 FlatIndexBytes = 1643820 BTreeIndexBytes = 2065359 DataBytes = 1073744719 DataPages = 70000 FlatIndexBytes = 3454718 BTreeIndexBytes = 3553208 00000.000 II| FAKE_ENV: Born at 2025-07-08T11:56:19.387234Z 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 ...compacting ...waiting until compacted ...compacting ...waiting until compacted 00133.687 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00133.690 NN| TABLET_SAUSAGECACHE: Poison cache serviced 265 reqs hit {1 102b} miss {266 17592354026b} 00133.690 II| FAKE_ENV: Shut order, stopping 4 BS groups 00133.690 II| FAKE_ENV: DS.0 gone, left {15399b, 2}, put {222302b, 1558} 00133.691 II| FAKE_ENV: DS.1 gone, left {4398138657b, 535}, put {8813752974b, 2851} 00134.115 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00134.115 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00134.115 II| FAKE_ENV: All BS storage groups are stopped 00134.115 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00134.123 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped |63.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol >> TPartBtreeIndexIteration::FewNodes_Groups_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_History_Slices |63.8%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |63.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log} >> TCmsTest::TestKeepAvailableMode >> TCmsTest::StateStorageTwoRings >> TCmsTest::TestSetResetMarkers >> TCmsTest::RequestRestartServicesOk |63.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... results_accumulator.log} |63.8%| [TM] {RESULT} ydb/core/tablet_flat/ut_large/unittest |63.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol >> TCmsTest::RequestRestartServicesRejectSecond >> test.py::test[count-count_all_grouped--Results] [GOOD] >> TClusterInfoTest::DeviceId [GOOD] >> TClusterInfoTest::FillInfo [GOOD] >> TCmsTenatsTest::CollectInfo |63.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |63.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |63.9%| [LD] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut >> TCmsTest::WalleTasksWithNodeLimit >> TCmsTest::StateStorageNodesFromOneRing >> TDowntimeTest::HasUpcomingDowntime [GOOD] >> TDowntimeTest::SetIgnoredDowntimeGap [GOOD] >> TDowntimeTest::CleanupOldSegments [GOOD] >> TCmsTest::ActionIssuePartialPermissions >> test.py::test[distinct-distinct_list_after_group-default.txt-ForceBlocks] |63.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TDowntimeTest::CleanupOldSegments [GOOD] >> TCmsTest::WalleTasks >> TBlobStorageProxyTest::TestBlock >> TPartBtreeIndexIteration::FewNodes_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices >> KqpPragma::Auth |63.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |63.9%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw >> TCmsTest::StateStorageTwoRings [GOOD] >> TCmsTest::StateStorageTwoBrokenRings >> TCmsTest::RequestRestartServicesMultipleNodes |63.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw >> TCmsTest::RequestReplaceBrokenDevices [GOOD] >> TCmsTest::PermissionDuration >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc >> KqpSinkTx::OlapSnapshotROInteractive1 [GOOD] >> KqpSinkTx::OlapSnapshotROInteractive2 >> TCmsTest::StateStorageNodesFromOneRing [GOOD] >> TCmsTest::StateStorageRollingRestart >> TCmsTest::TestKeepAvailableMode [GOOD] >> TCmsTest::TestForceRestartMode >> TCmsTest::RequestRestartServicesOk [GOOD] >> TCmsTest::RequestRestartServicesReject >> TCmsTenatsTest::CollectInfo [GOOD] >> TCmsTenatsTest::RequestRestartServices >> TCmsTest::TestSetResetMarkers [GOOD] >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42 >> TKesusTest::TestQuoterAccountResourcesForgetClient [GOOD] >> TCmsTest::ActionIssuePartialPermissions [GOOD] >> TCmsTest::ActionWithZeroDuration >> TCmsTest::RequestRestartServicesRejectSecond [GOOD] >> TCmsTest::RequestRestartServicesWrongHost >> test.py::test[window-win_func_with_struct_access-default.txt-Results] [GOOD] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_FlatIndex >> TxUsage::WriteToTopic_Demo_1 [GOOD] >> TCmsTest::CollectInfo >> TFlatTableExecutor_IndexLoading::Scan_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_BTreeIndex >> KqpPragma::Auth [GOOD] >> 
KqpPragma::MatchRecognizeWithTimeOrderRecoverer ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterAccountResourcesForgetClient [GOOD] Test command err: 2025-07-08T11:58:22.843329Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:22.843373Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:22.848591Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:22.848636Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:22.860132Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:22.861202Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:131:2157], cookie=3633668795134219466, path="/Res", config={ MaxUnitsPerSecond: -100 }) 2025-07-08T11:58:22.861247Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:131:2157], cookie=3633668795134219466) 2025-07-08T11:58:22.861354Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:137:2162], cookie=13472189689416453808, path="/ResWithoutMaxUnitsPerSecond", config={ }) 2025-07-08T11:58:22.861363Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:137:2162], cookie=13472189689416453808) 2025-07-08T11:58:22.861401Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:140:2165], cookie=12151675784723435080, path="/ResWithMaxUnitsPerSecond", config={ MaxUnitsPerSecond: 1 }) 2025-07-08T11:58:22.861442Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "ResWithMaxUnitsPerSecond" 2025-07-08T11:58:22.882561Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:140:2165], cookie=12151675784723435080) 2025-07-08T11:58:22.882749Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:146:2170], cookie=2177271351788765671, path="/ResWithMaxUnitsPerSecond/ChildWithoutMaxUnitsPerSecond", config={ }) 2025-07-08T11:58:22.882810Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "ResWithMaxUnitsPerSecond/ChildWithoutMaxUnitsPerSecond" 2025-07-08T11:58:22.895011Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:146:2170], cookie=2177271351788765671) 2025-07-08T11:58:23.145860Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:23.145895Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:23.149863Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:23.149899Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:23.177390Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:23.177570Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:133:2159], cookie=16173691682369360773, path="/Root", config={ MaxUnitsPerSecond: 100 PrefetchCoefficient: 300 }) 2025-07-08T11:58:23.177712Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-07-08T11:58:23.189016Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:133:2159], cookie=16173691682369360773) 2025-07-08T11:58:23.189208Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxQuoterResourceAdd::Execute (sender=[2:143:2167], cookie=16096474894082275740, path="/Root/Res", config={ }) 2025-07-08T11:58:23.189269Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-07-08T11:58:23.200540Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:143:2167], cookie=16096474894082275740) 2025-07-08T11:58:23.201164Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:148:2172]. Cookie: 6112145739758464373. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 300 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 2 Version: "version" Schema: "schema" CloudId: "cloud" FolderId: "folder" ResourceId: "resource" SourceId: "source" Tags { key: "key" value: "value" } } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-07-08T11:58:23.201186Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[2:148:2172], cookie=6112145739758464373) 2025-07-08T11:58:23.201314Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [2:148:2172]. Cookie: 11243507360349484968. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 29000 } } 2025-07-08T11:58:23.201322Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[2:148:2172], cookie=11243507360349484968) 2025-07-08T11:58:25.234250Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:25.234296Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:25.238636Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:25.238684Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:25.252526Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:25.252648Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:133:2159], cookie=4789566503817608701, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-07-08T11:58:25.252738Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-07-08T11:58:25.281459Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:133:2159], cookie=4789566503817608701) 2025-07-08T11:58:25.281712Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:143:2167], cookie=12937702756332921182, path="/Root/Res", config={ }) 2025-07-08T11:58:25.281782Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-07-08T11:58:25.293827Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:143:2167], cookie=12937702756332921182) 2025-07-08T11:58:25.294116Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:148:2172]. Cookie: 2282254622917956771. 
Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-07-08T11:58:25.294130Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[3:148:2172], cookie=2282254622917956771) 2025-07-08T11:58:25.294231Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [3:148:2172]. Cookie: 13107295735242514511. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1019000 } } 2025-07-08T11:58:25.294239Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[3:148:2172], cookie=13107295735242514511) 2025-07-08T11:58:27.526206Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:27.526239Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:27.529958Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:27.530004Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:27.552968Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:27.553092Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:133:2159], cookie=10077821960504543496, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-07-08T11:58:27.553162Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-07-08T11:58:27.565667Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:133:2159], cookie=10077821960504543496) 2025-07-08T11:58:27.565911Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:143:2167]. Cookie: 5810051943003118701. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-07-08T11:58:27.565920Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:143:2167], cookie=5810051943003118701) 2025-07-08T11:58:27.565991Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [4:143:2167]. Cookie: 13730954184363233013. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2025-07-08T11:58:27.565997Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[4:143:2167], cookie=13730954184363233013) 2025-07-08T11:58:27.566034Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [4:143:2167]. Cookie: 6034195259027907077. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2025-07-08T11:58:27.566038Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[4:143:2167], cookie=6034195259027907077) 2025-07-08T11:58:29.617485Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:29.617512Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:29.620672Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:29.620722Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:29.646233Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:29.646380Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:133:2159], cookie=6406687926995478098, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-07-08T11:58:29.646469Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-07-08T11:58:29.662118Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:133:2159], cookie=6406687926995478098) 2025-07-08T11:58:29.662352Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:143:2167]. Cookie: 9017692829449493090. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-07-08T11:58:29.662362Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:143:2167], cookie=9017692829449493090) 2025-07-08T11:58:29.662453Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [5:143:2167]. Cookie: 12565066472047091217. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 3000000 } } 2025-07-08T11:58:29.662460Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[5:143:2167], cookie=12565066472047091217) 2025-07-08T11:58:32.303879Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:174:2191]. Cookie: 11602882145264964608. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-07-08T11:58:32.303909Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:174:2191], cookie=11602882145264964608) 2025-07-08T11:58:32.304004Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [5:174:2191]. Cookie: 6973905066240375429. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 9000000 } } 2025-07-08T11:58:32.304012Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[5:174:2191], cookie=6973905066240375429) 2025-07-08T11:58:34.509693Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:201:2217]. Cookie: 7688723778199952448. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-07-08T11:58:34.509718Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:201:2217], cookie=7688723778199952448) 2025-07-08T11:58:34.509797Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [5:201:2217]. Cookie: 8267344029153288627. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 15000000 } } 2025-07-08T11:58:34.509804Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[5:201:2217], cookie=8267344029153288627) >> TxUsage::WriteToTopic_Demo_10 >> TFlatTableExecutor_IndexLoading::Scan_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_History_FlatIndex >> TBlobStorageProxyTest::TestBlock [GOOD] >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob |63.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |63.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |63.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots >> test.py::test[join-full_equal_null-off-ForceBlocks] [GOOD] >> test.py::test[join-full_equal_null-off-Results] [SKIPPED] >> test.py::test[join-join_no_correlation_in_order_by--ForceBlocks] >> TFlatTableExecutor_IndexLoading::Scan_History_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_History_BTreeIndex >> TCmsTest::PermissionDuration [GOOD] >> TCmsTest::RacyStartCollecting >> TCmsTest::RequestRestartServicesMultipleNodes [GOOD] >> TCmsTest::RequestRestartServicesDryRun >> YdbIndexTable::MultiShardTableOneIndexDataColumn [GOOD] >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap >> TCmsTest::RequestRestartServicesReject [GOOD] >> TCmsTest::RequestRestartServicesPartial |63.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |63.9%| [LD] {RESULT} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |63.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut ------- [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part19/pytest >> test.py::test[window-win_func_with_struct_access-default.txt-Results] [GOOD] Test command err: 127.0.0.1 - - [08/Jul/2025 11:56:27] "GET /mylib.sql HTTP/1.1" 200 - 127.0.0.1 - - [08/Jul/2025 11:56:28] "GET /mylib.sql HTTP/1.1" 200 - 127.0.0.1 - - [08/Jul/2025 11:56:30] "GET /mylib.sql HTTP/1.1" 
200 - >> TCmsTest::RequestRestartServicesWrongHost [GOOD] >> TCmsTest::RestartNodeInDownState >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42 [GOOD] >> TCmsTest::TestProcessingQueue >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer [GOOD] >> TCmsTest::TestForceRestartMode [GOOD] >> TCmsTest::TestForceRestartModeDisconnects >> TCmsTest::StateStorageTwoBrokenRings [GOOD] >> TCmsTest::SysTabletsNode >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc [GOOD] >> KqpSinkTx::OlapSnapshotROInteractive2 [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob [GOOD] >> TCmsTenatsTest::TestClusterLimit >> TFlatTableExecutor_IndexLoading::Scan_History_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_FlatIndex >> TCmsTest::CollectInfo [GOOD] >> TCmsTest::DynamicConfig >> TCmsTest::ActionWithZeroDuration [GOOD] >> TCmsTest::CheckUnreplicatedDiskPreventsRestart >> TFlatTableExecutor_IndexLoading::Scan_Groups_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex |64.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/yql/unittest >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer [GOOD] Test command err: Trying to start YDB, gRPC: 23655, MsgBus: 24980 2025-07-08T11:58:35.917301Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679258436005897:2078];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:35.918930Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000bd9/r3tmp/tmpxKVkdP/pdisk_1.dat 2025-07-08T11:58:36.014330Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:58:36.016654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:36.016680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:36.021248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23655, node 1 2025-07-08T11:58:36.061150Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:58:36.061164Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:58:36.061166Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:58:36.061215Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24980 TClient is connected to server localhost:24980 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:58:36.185885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:36.213039Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:58:36.233714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T11:58:36.270749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T11:58:36.331632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:36.389860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:58:36.470547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:58:36.530550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:58:36.540867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T11:58:36.561536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T11:58:36.625501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T11:58:36.646518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T11:58:36.661115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T11:58:36.859142Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7524679262730975656:2448], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:2:34: Error: Pragma auth not supported inside Kikimr query., code: 2016 2025-07-08T11:58:36.859591Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjUxN2RmZWUtNzAzM2Q3M2QtZDdhN2ZhNzQtZTY2NzAyMzg=, ActorId: [1:7524679262730975653:2446], ActorState: ExecuteState, TraceId: 01jzmydx9q9951ka8mecfhz1r9, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-07-08T11:58:36.913947Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Trying to start YDB, gRPC: 31881, MsgBus: 19867 2025-07-08T11:58:37.253695Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679267573894310:2232];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:37.256070Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000bd9/r3tmp/tmpdSd22e/pdisk_1.dat 2025-07-08T11:58:37.274222Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31881, node 2 2025-07-08T11:58:37.291940Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:58:37.291954Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:58:37.291956Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:58:37.292006Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19867 TClient is connected to server localhost:19867 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-07-08T11:58:37.359839Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:37.359867Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:37.360200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:58:37.361499Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:58:37.362476Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:58:37.369947Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:37.381604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:37.410023Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:37.473187Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:37.616704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:58:37.627607Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:58:37.640102Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T11:58:37.651991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T11:58:37.666119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T11:58:37.684022Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T11:58:37.699636Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T11:58:37.912605Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T11:58:38.039186Z node 2 
:KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751975918077, txId: 281474976715674] shutting down 2025-07-08T11:58:38.251522Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapSnapshotROInteractive2 [GOOD] Test command err: Trying to start YDB, gRPC: 23455, MsgBus: 12893 2025-07-08T11:58:33.802208Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679253035692989:2239];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:33.821236Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0027f9/r3tmp/tmpsG4E6L/pdisk_1.dat 2025-07-08T11:58:33.862573Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23455, node 1 2025-07-08T11:58:33.900295Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:58:33.900309Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:58:33.900311Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:58:33.900358Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12893 2025-07-08T11:58:33.934112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:33.934149Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:33.941409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12893 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:58:33.973055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:58:33.978279Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:58:34.247537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T11:58:34.276612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7524679257330660813:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:58:34.276685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7524679257330660813:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:34.276784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7524679257330660813:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:34.276815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7524679257330660813:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:34.276834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7524679257330660813:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:34.276860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7524679257330660813:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:58:34.276881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7524679257330660813:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:34.276905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7524679257330660813:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:34.276925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7524679257330660813:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:58:34.276944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7524679257330660813:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:34.276987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7524679257330660813:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:58:34.277010Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7524679257330660813:2290];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:58:34.287904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7524679257330660818:2295];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:58:34.287943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7524679257330660818:2295];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:34.288043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7524679257330660818:2295];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:34.288070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7524679257330660818:2295];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:34.288093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7524679257330660818:2295];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:34.288121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7524679257330660818:2295];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:58:34.288143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7524679257330660818:2295];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:34.288168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7524679257330660818:2295];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:34.288190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7524679257330660818:2295];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:58:34.288211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7524679257330660818:2295];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:34.288234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7524679257330660818:2295];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:58:34.288256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7524679257330660818:2295];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:58:34.306150Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7524679257330660825:2296];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:58:34.306194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7524679257330660825:2296];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:34.306294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7524679257330660825:2296];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:34.306320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7524679257330660825:2296];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:34.306336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7524679257330660825:2296];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:34.306356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7524679257330660825:2296];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:58:34.306374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7524679257330660825:2296];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:34.306395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7524679257330660825:2296];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:34.306411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=720751862240378 ... 
075186224037981;local_tx_no=40;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037892,72075186224037896,72075186224037897,72075186224037969,72075186224037970;receive=72075186224037913; 2025-07-08T11:58:37.930504Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=281474976715662;tx_id=281474976715662;fline=tx_controller.cpp:215;event=finished_tx;tx_id=281474976715662; 2025-07-08T11:58:37.930528Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=281474976715662;tx_id=281474976715662;fline=tx_controller.cpp:215;event=finished_tx;tx_id=281474976715662; 2025-07-08T11:58:37.930577Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037981;self_id=[2:7524679265555731486:2357];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037981;local_tx_no=42;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037896,72075186224037897,72075186224037969,72075186224037970;receive=72075186224037892; 2025-07-08T11:58:37.930585Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037981;self_id=[2:7524679265555731486:2357];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037981;local_tx_no=43;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037896,72075186224037897,72075186224037969,72075186224037970;receive=72075186224037892; 2025-07-08T11:58:37.930591Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037981;self_id=[2:7524679265555731486:2357];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037981;local_tx_no=44;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037896,72075186224037897,72075186224037969,72075186224037970;receive=72075186224037892; 2025-07-08T11:58:37.930598Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037981;self_id=[2:7524679265555731486:2357];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037981;local_tx_no=45;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037896,72075186224037897,72075186224037969,72075186224037970;receive=72075186224037892; 2025-07-08T11:58:37.930604Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037981;self_id=[2:7524679265555731486:2357];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037981;local_tx_no=46;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037896,72075186224037897,72075186224037969,72075186224037970;receive=72075186224037892; 2025-07-08T11:58:37.930610Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037981;self_id=[2:7524679265555731486:2357];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037981;local_tx_no=47;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037896,72075186224037897,72075186224037969,72075186224037970;receive=72075186224037892; 2025-07-08T11:58:37.930617Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037981;self_id=[2:7524679265555731486:2357];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037981;local_tx_no=48;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037896,72075186224037897,72075186224037969,72075186224037970;receive=72075186224037892; 2025-07-08T11:58:37.930635Z node 2 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037981;self_id=[2:7524679265555731486:2357];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037981;local_tx_no=50;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037896,72075186224037969,72075186224037970;receive=72075186224037897; 2025-07-08T11:58:37.930641Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037969;tx_state=TTxProgressTx::Execute;tx_current=281474976715662;tx_id=281474976715662;fline=tx_controller.cpp:215;event=finished_tx;tx_id=281474976715662; 2025-07-08T11:58:37.930642Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037981;self_id=[2:7524679265555731486:2357];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037981;local_tx_no=51;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037896,72075186224037969,72075186224037970;receive=72075186224037897; 2025-07-08T11:58:37.930649Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037981;self_id=[2:7524679265555731486:2357];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037981;local_tx_no=52;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037896,72075186224037969,72075186224037970;receive=72075186224037897; 2025-07-08T11:58:37.930656Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037981;self_id=[2:7524679265555731486:2357];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037981;local_tx_no=53;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037896,72075186224037969,72075186224037970;receive=72075186224037897; 2025-07-08T11:58:37.930663Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037981;self_id=[2:7524679265555731486:2357];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037981;local_tx_no=54;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037896,72075186224037969,72075186224037970;receive=72075186224037897; 2025-07-08T11:58:37.930671Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037981;self_id=[2:7524679265555731486:2357];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037981;local_tx_no=55;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037896,72075186224037969,72075186224037970;receive=72075186224037897; 2025-07-08T11:58:37.930678Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037981;self_id=[2:7524679265555731486:2357];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037981;local_tx_no=56;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037896,72075186224037969,72075186224037970;receive=72075186224037897; 2025-07-08T11:58:37.930741Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037970;tx_state=TTxProgressTx::Execute;tx_current=281474976715662;tx_id=281474976715662;fline=tx_controller.cpp:215;event=finished_tx;tx_id=281474976715662; 2025-07-08T11:58:37.930822Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037981;self_id=[2:7524679265555731486:2357];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037981;local_tx_no=58;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037896,72075186224037970;receive=72075186224037969; 2025-07-08T11:58:37.930830Z node 2 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037981;self_id=[2:7524679265555731486:2357];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037981;local_tx_no=59;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037896,72075186224037970;receive=72075186224037969; 2025-07-08T11:58:37.930837Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037981;self_id=[2:7524679265555731486:2357];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037981;local_tx_no=60;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037896,72075186224037970;receive=72075186224037969; 2025-07-08T11:58:37.930845Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037981;self_id=[2:7524679265555731486:2357];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037981;local_tx_no=61;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037896,72075186224037970;receive=72075186224037969; 2025-07-08T11:58:37.930853Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037981;self_id=[2:7524679265555731486:2357];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037981;local_tx_no=62;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037896,72075186224037970;receive=72075186224037969; 2025-07-08T11:58:37.930860Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037981;self_id=[2:7524679265555731486:2357];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037981;local_tx_no=63;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037896,72075186224037970;receive=72075186224037969; 2025-07-08T11:58:37.930868Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037981;self_id=[2:7524679265555731486:2357];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037981;local_tx_no=64;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037896,72075186224037970;receive=72075186224037969; 2025-07-08T11:58:37.930880Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=281474976715662;tx_id=281474976715662;fline=tx_controller.cpp:215;event=finished_tx;tx_id=281474976715662; 2025-07-08T11:58:37.930887Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037981;self_id=[2:7524679265555731486:2357];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037981;local_tx_no=66;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037896;receive=72075186224037970; 2025-07-08T11:58:37.930894Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037981;self_id=[2:7524679265555731486:2357];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037981;local_tx_no=67;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037896;receive=72075186224037970; 2025-07-08T11:58:37.930902Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037981;self_id=[2:7524679265555731486:2357];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037981;local_tx_no=68;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037896;receive=72075186224037970; 2025-07-08T11:58:37.930908Z node 2 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037981;self_id=[2:7524679265555731486:2357];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037981;local_tx_no=69;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037896;receive=72075186224037970; 2025-07-08T11:58:37.930915Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037981;self_id=[2:7524679265555731486:2357];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037981;local_tx_no=70;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037896;receive=72075186224037970; 2025-07-08T11:58:37.930922Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037981;self_id=[2:7524679265555731486:2357];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037981;local_tx_no=71;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037896;receive=72075186224037970; 2025-07-08T11:58:37.930935Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037981;self_id=[2:7524679265555731486:2357];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037981;local_tx_no=72;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037896;receive=72075186224037970; 2025-07-08T11:58:37.931009Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037981;tx_state=TTxProgressTx::Execute;tx_current=281474976715662;tx_id=281474976715662;fline=tx_controller.cpp:215;event=finished_tx;tx_id=281474976715662; 2025-07-08T11:58:38.078727Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037993;tx_state=TTxProgressTx::Execute;tx_current=281474976715665;tx_id=281474976715665;fline=tx_controller.cpp:215;event=finished_tx;tx_id=281474976715665; >> TCmsTest::DynamicConfig [GOOD] >> TCmsTest::DisabledEvictVDisks >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex_Empty [GOOD] >> TFlatTableExecutor_KeepEraseMarkers::TestKeepEraseMarkers >> TFlatTableExecutor_KeepEraseMarkers::TestKeepEraseMarkers [GOOD] >> TFlatTableExecutor_LongTx::MemTableLongTx >> test.py::test[window-win_expr_bounds--Results] [GOOD] >> test.py::test[window-win_func_aggr_4func_sort--Results] >> TFlatTableExecutor_LongTx::MemTableLongTx [GOOD] >> TFlatTableExecutor_LongTx::CompactUncommittedLongTx [GOOD] |64.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |64.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |64.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence >> test.py::test[distinct-distinct_list_after_group-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_list_after_group-default.txt-Results] >> TCmsTest::RacyStartCollecting [GOOD] >> TCmsTest::TestOutdatedState >> TCmsTest::PriorityRange >> TFlatTableExecutor_LongTx::CompactCommittedLongTx [GOOD] >> TFlatTableExecutor_LongTx::CompactedLongTxRestart [GOOD] >> TFlatTableExecutor_LongTx::CompactMultipleChanges [GOOD] >> TFlatTableExecutor_LongTx::LongTxBorrow >> TCmsTenatsTest::RequestRestartServices [GOOD] >> TCmsTest::RequestRestartServicesDryRun [GOOD] >> TCmsTest::RequestReplaceDevices >> Describe::Location [GOOD] >> LocalPartition::Basic >> TCmsTest::RequestRestartServicesPartial [GOOD] >> TCmsTest::RequestRestartServicesNoUser >> TFlatTableExecutor_LongTx::LongTxBorrow [GOOD] >> 
TFlatTableExecutor_LongTx::MemTableLongTxRead [GOOD] >> TFlatTableExecutor_LongTx::CompactedTxIdReuse [GOOD] >> TFlatTableExecutor_LongTx::MergeSkewedCommitted >> TFlatTableExecutor_LongTx::MergeSkewedCommitted [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::SmallValues [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::OuterBlobValues [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::ExternalBlobValues [GOOD] >> TFlatTableExecutor_MoveTableData::TestMoveSnapshot >> TCmsTest::WalleTasksWithNodeLimit [GOOD] >> TCmsTest::WalleTasksDifferentPriorities >> TCmsTest::SysTabletsNode [GOOD] >> TFlatTableExecutor_MoveTableData::TestMoveSnapshot [GOOD] >> TFlatTableExecutor_MoveTableData::TestMoveSnapshotFollower [GOOD] >> TFlatTableExecutor_PostponedScan::TestPostponedScan [GOOD] >> TFlatTableExecutor_PostponedScan::TestCancelFinishedScan [GOOD] >> TFlatTableExecutor_PostponedScan::TestCancelRunningPostponedScan >> TCmsTest::TestProcessingQueue [GOOD] >> TCmsTest::RestartNodeInDownState [GOOD] >> TCmsTest::TestForceRestartModeDisconnects [GOOD] >> TFlatTableExecutor_PostponedScan::TestCancelRunningPostponedScan [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTenatsTest::RequestRestartServices [GOOD] Test command err: 2025-07-08T11:58:35.224482Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2025-07-08T11:58:35.225694Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:35.226642Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2025-07-08T11:58:35.226704Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2025-07-08T11:58:35.226746Z node 1 :CMS DEBUG: Using default config. 2025-07-08T11:58:35.226830Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2025-07-08T11:58:35.227955Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:35.228008Z node 1 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:35.228321Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:35.228426Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:35.229538Z node 1 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:35.229576Z node 1 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:35.229601Z node 1 :CMS DEBUG: Using default config 2025-07-08T11:58:35.229624Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:35.248709Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-07-08T11:58:35.269871Z node 1 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:35.270007Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:35.271397Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:35.271492Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-07-08T11:58:35.271497Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-07-08T11:58:35.271505Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-07-08T11:58:35.271508Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-07-08T11:58:35.271520Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:35.271583Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-07-08T11:58:35.271595Z node 
1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-07-08T11:58:35.273757Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { 
VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2025-07-08T11:58:35.304767Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:35.304812Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-07-08T11:58:36.787442Z node 9 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:36.790430Z node 9 :CMS DEBUG: TConsole::TTxInitScheme Execute 2025-07-08T11:58:36.791679Z node 9 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:36.791720Z node 9 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:36.792171Z node 9 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:36.792267Z node 9 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:36.793793Z node 9 :CMS DEBUG: TConsole::TTxInitScheme Complete 2025-07-08T11:58:36.793817Z node 9 :CMS DEBUG: TConsole::TTxLoadState Execute 2025-07-08T11:58:36.793852Z node 9 :CMS DEBUG: Using default config. 
2025-07-08T11:58:36.793935Z node 9 :CMS DEBUG: TConsole::TTxLoadState Complete 2025-07-08T11:58:36.794542Z node 9 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:36.794619Z node 9 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:36.794658Z node 9 :CMS DEBUG: Using default config 2025-07-08T11:58:36.794679Z node 9 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:36.807638Z node 9 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-07-08T11:58:36.819377Z node 9 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:36.819464Z node 9 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:36.819501Z node 9 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:36.819594Z node 9 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-07-08T11:58:36.819599Z node 9 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-07-08T11:58:36.819608Z node 9 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-07-08T11:58:36.819612Z node 9 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-07-08T11:58:36.819620Z node 9 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-07-08T11:58:36.819635Z node 9 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-07-08T11:58:36.819953Z node 9 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 9 PDiskId: 9 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 10 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 11 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 12 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 13 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 14 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 15 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 16 PDiskId: 16 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 9 PDiskId: 9 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 9 PDiskId: 9 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 9 PDiskId: 9 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 9 PDiskId: 9 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 10 PDiskId: 10 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 10 PDiskId: 10 VSlotId: 1 ... 
h state: Up, with limit: 0, with ratio limit: 0, locked nodes: 4, down nodes: 0 2025-07-08T11:58:37.109719Z node 9 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:37.109727Z node 9 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "22" Services: "dynnode" Duration: 60000000 2025-07-08T11:58:37.109729Z node 9 :CMS DEBUG: [Nodes Counter] Checking Node: 22, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 5, down nodes: 0 2025-07-08T11:58:37.109731Z node 9 :CMS DEBUG: [Nodes Counter] Checking Node: 22, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 5, down nodes: 0 2025-07-08T11:58:37.109734Z node 9 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:37.109738Z node 9 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "23" Services: "dynnode" Duration: 60000000 2025-07-08T11:58:37.109740Z node 9 :CMS DEBUG: [Nodes Counter] Checking Node: 23, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 6, down nodes: 0 2025-07-08T11:58:37.109742Z node 9 :CMS DEBUG: [Nodes Counter] Checking Node: 23, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 6, down nodes: 0 2025-07-08T11:58:37.109744Z node 9 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:37.109748Z node 9 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "24" Services: "dynnode" Duration: 60000000 2025-07-08T11:58:37.109750Z node 9 :CMS DEBUG: [Nodes Counter] Checking Node: 24, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 7, down nodes: 0 2025-07-08T11:58:37.109752Z node 9 :CMS DEBUG: [Nodes Counter] Checking Node: 24, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 7, down nodes: 0 2025-07-08T11:58:37.109754Z node 9 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:37.109766Z node 9 :CMS DEBUG: Accepting permission: id# user-p-5, requestId# user-r-4, owner# user 2025-07-08T11:58:37.109772Z node 9 :CMS INFO: Adding lock for Host ::1:12013 (21) (permission user-p-5 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:37.109775Z node 9 :CMS DEBUG: Accepting permission: id# user-p-6, requestId# user-r-4, owner# user 2025-07-08T11:58:37.109778Z node 9 :CMS INFO: Adding lock for Host ::1:12014 (22) (permission user-p-6 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:37.109780Z node 9 :CMS DEBUG: Accepting permission: id# user-p-7, requestId# user-r-4, owner# user 2025-07-08T11:58:37.109783Z node 9 :CMS INFO: Adding lock for Host ::1:12015 (23) (permission user-p-7 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:37.109786Z node 9 :CMS DEBUG: Accepting permission: id# user-p-8, requestId# user-r-4, owner# user 2025-07-08T11:58:37.109789Z node 9 :CMS INFO: Adding lock for Host ::1:12016 (24) (permission user-p-8 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:37.109794Z node 9 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:37.109826Z node 9 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-5, validity# 1970-01-01T00:03:00.335560Z, action# Type: RESTART_SERVICES Host: "21" Services: "dynnode" Duration: 60000000 2025-07-08T11:58:37.109832Z node 9 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-6, validity# 1970-01-01T00:03:00.335560Z, action# Type: RESTART_SERVICES Host: "22" Services: "dynnode" Duration: 60000000 2025-07-08T11:58:37.109837Z node 9 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-7, validity# 1970-01-01T00:03:00.335560Z, action# Type: RESTART_SERVICES Host: "23" Services: "dynnode" Duration: 60000000 2025-07-08T11:58:37.109841Z node 9 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# 
user-p-8, validity# 1970-01-01T00:03:00.335560Z, action# Type: RESTART_SERVICES Host: "24" Services: "dynnode" Duration: 60000000 2025-07-08T11:58:37.122181Z node 9 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:37.122306Z node 9 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "21" Services: "dynnode" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "22" Services: "dynnode" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "23" Services: "dynnode" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "24" Services: "dynnode" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-4" Permissions { Id: "user-p-5" Action { Type: RESTART_SERVICES Host: "21" Services: "dynnode" Duration: 60000000 } Deadline: 180335560 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 21 InterconnectPort: 12013 } } } Permissions { Id: "user-p-6" Action { Type: RESTART_SERVICES Host: "22" Services: "dynnode" Duration: 60000000 } Deadline: 180335560 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 22 InterconnectPort: 12014 } } } Permissions { Id: "user-p-7" Action { Type: RESTART_SERVICES Host: "23" Services: "dynnode" Duration: 60000000 } Deadline: 180335560 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 23 InterconnectPort: 12015 } } } Permissions { Id: "user-p-8" Action { Type: RESTART_SERVICES Host: "24" Services: "dynnode" Duration: 60000000 } Deadline: 180335560 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 24 InterconnectPort: 12016 } } } } 2025-07-08T11:58:37.133972Z node 9 :CMS INFO: Adding lock for Host ::1:12013 (21) (permission user-p-5 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:37.133995Z node 9 :CMS INFO: Adding lock for Host ::1:12009 (17) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:37.134001Z node 9 :CMS INFO: Adding lock for Host ::1:12014 (22) (permission user-p-6 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:37.134008Z node 9 :CMS INFO: Adding lock for Host ::1:12010 (18) (permission user-p-2 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:37.134017Z node 9 :CMS INFO: Adding lock for Host ::1:12012 (20) (permission user-p-4 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:37.134024Z node 9 :CMS INFO: Adding lock for Host ::1:12011 (19) (permission user-p-3 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:37.134031Z node 9 :CMS INFO: Adding lock for Host ::1:12016 (24) (permission user-p-8 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:37.134036Z node 9 :CMS INFO: Adding lock for Host ::1:12015 (23) (permission user-p-7 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:37.134090Z node 9 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:37.134108Z node 9 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:37.134119Z node 9 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:37.134257Z node 9 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-07-08T11:58:37.134265Z node 9 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 60000000 
2025-07-08T11:58:37.134274Z node 9 :CMS DEBUG: [Nodes Counter] Checking Node: 9, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 8, down nodes: 0 2025-07-08T11:58:37.134309Z node 9 :CMS DEBUG: Ring: 0; State: Ok 2025-07-08T11:58:37.134312Z node 9 :CMS DEBUG: Ring: 1; State: Ok 2025-07-08T11:58:37.134314Z node 9 :CMS DEBUG: Ring: 2; State: Ok 2025-07-08T11:58:37.134318Z node 9 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:37.134332Z node 9 :CMS DEBUG: Accepting permission: id# user-p-9, requestId# user-r-5, owner# user 2025-07-08T11:58:37.134338Z node 9 :CMS INFO: Adding lock for Host ::1:12001 (9) (permission user-p-9 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:37.134347Z node 9 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:37.134384Z node 9 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-9, validity# 1970-01-01T00:03:00.437072Z, action# Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 60000000 2025-07-08T11:58:37.145218Z node 9 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:37.145288Z node 9 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-5" Permissions { Id: "user-p-9" Action { Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 60000000 } Deadline: 180437072 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 9 InterconnectPort: 12001 } } } } 2025-07-08T11:58:37.157970Z node 9 :CMS INFO: Adding lock for Host ::1:12013 (21) (permission user-p-5 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:37.157992Z node 9 :CMS INFO: Adding lock for Host ::1:12009 (17) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:37.157999Z node 9 :CMS INFO: Adding lock for Host ::1:12001 (9) (permission user-p-9 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:37.158005Z node 9 :CMS INFO: Adding lock for Host ::1:12014 (22) (permission user-p-6 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:37.158011Z node 9 :CMS INFO: Adding lock for Host ::1:12010 (18) (permission user-p-2 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:37.158017Z node 9 :CMS INFO: Adding lock for Host ::1:12012 (20) (permission user-p-4 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:37.158024Z node 9 :CMS INFO: Adding lock for Host ::1:12011 (19) (permission user-p-3 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:37.158031Z node 9 :CMS INFO: Adding lock for Host ::1:12016 (24) (permission user-p-8 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:37.158038Z node 9 :CMS INFO: Adding lock for Host ::1:12015 (23) (permission user-p-7 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:37.158105Z node 9 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:37.158117Z node 9 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:37.158126Z node 9 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:37.158254Z node 9 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-07-08T11:58:37.158261Z node 9 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 60000000 
2025-07-08T11:58:37.158269Z node 9 :CMS DEBUG: [Nodes Counter] Checking Node: 10, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 9, down nodes: 0 2025-07-08T11:58:37.158291Z node 9 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Issue in affected group with id '0': too many unavailable vdisks. Locked: Host ::1:12001 (9) has planned shutdown (permission user-p-9 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: ) 2025-07-08T11:58:37.158302Z node 9 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:37.170559Z node 9 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:37.170624Z node 9 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (9) has planned shutdown (permission user-p-9 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } RequestId: "user-r-6" Deadline: 420538584 } >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions [GOOD] >> TCmsTest::VDisksEviction >> TCmsTenatsTest::TestClusterLimit [GOOD] >> TCmsTenatsTest::RequestShutdownHost >> TCmsTest::CheckUnreplicatedDiskPreventsRestart [GOOD] >> TCmsTest::AllVDisksEvictionInRack ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::RestartNodeInDownState [GOOD] Test command err: 2025-07-08T11:58:35.107493Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2025-07-08T11:58:35.108709Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:35.110025Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2025-07-08T11:58:35.110099Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2025-07-08T11:58:35.110154Z node 1 :CMS DEBUG: Using default config. 
2025-07-08T11:58:35.110241Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2025-07-08T11:58:35.111538Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:35.111612Z node 1 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:35.111969Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:35.112104Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:35.114903Z node 1 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:35.114979Z node 1 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:35.115029Z node 1 :CMS DEBUG: Using default config 2025-07-08T11:58:35.115061Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:35.142216Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-07-08T11:58:35.165971Z node 1 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:35.166132Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:35.167595Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:35.167735Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-07-08T11:58:35.167741Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-07-08T11:58:35.167749Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-07-08T11:58:35.167753Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-07-08T11:58:35.167768Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:35.167819Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-07-08T11:58:35.167848Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-07-08T11:58:35.169704Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 
1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 
1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2025-07-08T11:58:35.205881Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:35.205947Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-07-08T11:58:35.233088Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:35.233119Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:35.233185Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:35.233415Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" 
Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-5-5" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 1200 ... Id { NodeId: 18 PDiskId: 18 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 18 PDiskId: 18 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 19 PDiskId: 19 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 19 PDiskId: 19 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 19 PDiskId: 19 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 19 PDiskId: 19 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 20 PDiskId: 20 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 20 PDiskId: 20 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 20 PDiskId: 20 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 20 PDiskId: 20 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 21 PDiskId: 21 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 21 PDiskId: 21 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 21 PDiskId: 21 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 21 PDiskId: 21 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 22 PDiskId: 22 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 22 PDiskId: 22 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 22 PDiskId: 22 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 22 PDiskId: 22 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 23 PDiskId: 23 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 23 PDiskId: 23 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 23 PDiskId: 23 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 23 PDiskId: 23 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 24 PDiskId: 24 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 24 PDiskId: 24 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 24 PDiskId: 24 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 24 PDiskId: 24 VSlotId: 1003 
} GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 17 PDiskId: 17 VSlotId: 1000 } VSlotId { NodeId: 18 PDiskId: 18 VSlotId: 1000 } VSlotId { NodeId: 19 PDiskId: 19 VSlotId: 1000 } VSlotId { NodeId: 20 PDiskId: 20 VSlotId: 1000 } VSlotId { NodeId: 21 PDiskId: 21 VSlotId: 1000 } VSlotId { NodeId: 22 PDiskId: 22 VSlotId: 1000 } VSlotId { NodeId: 23 PDiskId: 23 VSlotId: 1000 } VSlotId { NodeId: 24 PDiskId: 24 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 17 PDiskId: 17 VSlotId: 1001 } VSlotId { NodeId: 18 PDiskId: 18 VSlotId: 1001 } VSlotId { NodeId: 19 PDiskId: 19 VSlotId: 1001 } VSlotId { NodeId: 20 PDiskId: 20 VSlotId: 1001 } VSlotId { NodeId: 21 PDiskId: 21 VSlotId: 1001 } VSlotId { NodeId: 22 PDiskId: 22 VSlotId: 1001 } VSlotId { NodeId: 23 PDiskId: 23 VSlotId: 1001 } VSlotId { NodeId: 24 PDiskId: 24 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 17 PDiskId: 17 VSlotId: 1002 } VSlotId { NodeId: 18 PDiskId: 18 VSlotId: 1002 } VSlotId { NodeId: 19 PDiskId: 19 VSlotId: 1002 } VSlotId { NodeId: 20 PDiskId: 20 VSlotId: 1002 } VSlotId { NodeId: 21 PDiskId: 21 VSlotId: 1002 } VSlotId { NodeId: 22 PDiskId: 22 VSlotId: 1002 } VSlotId { NodeId: 23 PDiskId: 23 VSlotId: 1002 } VSlotId { NodeId: 24 PDiskId: 24 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 17 PDiskId: 17 VSlotId: 1003 } VSlotId { NodeId: 18 PDiskId: 18 VSlotId: 1003 } VSlotId { NodeId: 19 PDiskId: 19 VSlotId: 1003 } VSlotId { NodeId: 20 PDiskId: 20 VSlotId: 1003 } VSlotId { NodeId: 21 PDiskId: 21 VSlotId: 1003 } VSlotId { NodeId: 22 PDiskId: 22 VSlotId: 1003 } VSlotId { NodeId: 23 PDiskId: 23 VSlotId: 1003 } VSlotId { NodeId: 24 PDiskId: 24 VSlotId: 1003 } } } } Success: true 2025-07-08T11:58:38.496306Z node 17 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:38.496375Z node 17 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-07-08T11:58:38.519892Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:38.519982Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:38.520289Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-17-17" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 17 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: DOWN Services { Name: "storage" State: DOWN Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-1-0" State: DOWN Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-1-0" State: DOWN Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-1-0" State: DOWN Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-1-0" State: DOWN Timestamp: 120029000 } Devices { Name: "pdisk-18-18" State: DOWN Timestamp: 120029000 } 
Timestamp: 120029000 NodeId: 18 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 19 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-20-20" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 20 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 21 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 22 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 23 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 24 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: 
"8" } StartTimeSeconds: 0 } Timestamp: 120029000 } } 2025-07-08T11:58:38.520354Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-07-08T11:58:38.520366Z node 17 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 2025-07-08T11:58:38.520378Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 18, with state: Down, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 1 2025-07-08T11:58:38.520417Z node 17 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:38.520433Z node 17 :CMS DEBUG: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2025-07-08T11:58:38.520441Z node 17 :CMS INFO: Adding lock for Host ::1:12002 (18) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:38.520453Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:38.520553Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:03:00.029000Z, action# Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 2025-07-08T11:58:38.561588Z node 17 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:38.610708Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:38.610729Z node 17 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:38.610805Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } Deadline: 180029000 Extentions { Type: HostInfo Hosts { Name: "::1" State: DOWN NodeId: 18 InterconnectPort: 12002 } } } } 2025-07-08T11:58:38.610825Z node 17 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:05:00.029000Z ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::TestProcessingQueue [GOOD] Test command err: 2025-07-08T11:58:35.027373Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2025-07-08T11:58:35.033185Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:35.042280Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:35.042377Z node 1 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:35.049177Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:35.049229Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2025-07-08T11:58:35.049256Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:35.049292Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2025-07-08T11:58:35.049346Z node 1 :CMS DEBUG: Using default config. 
2025-07-08T11:58:35.049437Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2025-07-08T11:58:35.089547Z node 1 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:35.089588Z node 1 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:35.089643Z node 1 :CMS DEBUG: Using default config 2025-07-08T11:58:35.089668Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:35.118298Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-07-08T11:58:35.161341Z node 1 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:35.161449Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:35.162931Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:35.163049Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-07-08T11:58:35.163057Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-07-08T11:58:35.163066Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-07-08T11:58:35.163070Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-07-08T11:58:35.163082Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:35.163138Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-07-08T11:58:35.163166Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-07-08T11:58:35.169924Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/pdisk.data" Guid: 1 
DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 
1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 18 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 18 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 18 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 18 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 19 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 19 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 19 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 19 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 20 
VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 20 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 20 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 20 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 21 VSlotId: 1000 } GroupId: 4 GroupGeneration: ... t: "17" Services: "storage" Duration: 60000000 } Deadline: 180030000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 17 InterconnectPort: 12001 } } } } 2025-07-08T11:58:38.581045Z node 17 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:38.648170Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17 2025-07-08T11:58:38.648194Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18 2025-07-08T11:58:38.648198Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19 2025-07-08T11:58:38.648201Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20 2025-07-08T11:58:38.648204Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21 2025-07-08T11:58:38.648208Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22 2025-07-08T11:58:38.648211Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23 2025-07-08T11:58:38.648214Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24 2025-07-08T11:58:38.648364Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:38.648417Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:38.648479Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2025-07-08T11:58:38.648489Z node 17 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 2025-07-08T11:58:38.648503Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 17, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-07-08T11:58:38.648522Z node 17 :CMS DEBUG: Ring: 0; State: Ok 2025-07-08T11:58:38.648526Z node 17 :CMS DEBUG: Ring: 1; State: Ok 2025-07-08T11:58:38.648529Z node 17 :CMS DEBUG: Ring: 2; State: Ok 2025-07-08T11:58:38.648533Z node 17 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:38.648583Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } Deadline: 180130000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 17 InterconnectPort: 12001 } } } } 2025-07-08T11:58:38.648607Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2025-07-08T11:58:38.648613Z node 17 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 
60000000 2025-07-08T11:58:38.648618Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 18, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-07-08T11:58:38.648622Z node 17 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:38.648642Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } Deadline: 180130000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 18 InterconnectPort: 12002 } } } } 2025-07-08T11:58:38.648658Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2025-07-08T11:58:38.648664Z node 17 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 2025-07-08T11:58:38.648671Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 19, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-07-08T11:58:38.648675Z node 17 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:38.648693Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 } Deadline: 180130000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 19 InterconnectPort: 12003 } } } } 2025-07-08T11:58:38.648706Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "20" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2025-07-08T11:58:38.648711Z node 17 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "20" Services: "storage" Duration: 60000000 2025-07-08T11:58:38.648715Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 20, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-07-08T11:58:38.648718Z node 17 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:38.648736Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "20" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "20" Services: "storage" Duration: 60000000 } Deadline: 180130000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 20 
InterconnectPort: 12004 } } } } 2025-07-08T11:58:38.648750Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "21" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2025-07-08T11:58:38.648755Z node 17 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "21" Services: "storage" Duration: 60000000 2025-07-08T11:58:38.648759Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 21, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-07-08T11:58:38.648763Z node 17 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:38.648781Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "21" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "21" Services: "storage" Duration: 60000000 } Deadline: 180130000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 21 InterconnectPort: 12005 } } } } 2025-07-08T11:58:38.648794Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "22" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2025-07-08T11:58:38.648800Z node 17 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "22" Services: "storage" Duration: 60000000 2025-07-08T11:58:38.648805Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 22, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-07-08T11:58:38.648809Z node 17 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:38.648826Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "22" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "22" Services: "storage" Duration: 60000000 } Deadline: 180130000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 22 InterconnectPort: 12006 } } } } 2025-07-08T11:58:38.648840Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "23" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2025-07-08T11:58:38.648844Z node 17 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "23" Services: "storage" Duration: 60000000 2025-07-08T11:58:38.648848Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 23, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-07-08T11:58:38.648852Z node 17 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:38.648870Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "23" 
Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "23" Services: "storage" Duration: 60000000 } Deadline: 180130000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 23 InterconnectPort: 12007 } } } } 2025-07-08T11:58:38.648883Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "24" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2025-07-08T11:58:38.648888Z node 17 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "24" Services: "storage" Duration: 60000000 2025-07-08T11:58:38.648892Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 24, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-07-08T11:58:38.648895Z node 17 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:38.648912Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "24" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "24" Services: "storage" Duration: 60000000 } Deadline: 180130000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 24 InterconnectPort: 12008 } } } } >> TCmsTenatsTest::TestNoneTenantPolicy >> TCmsTenatsTest::TestClusterRatioLimit >> TCmsTest::ScheduledEmergencyDuringRollingRestart >> TCmsTest::StateStorageRollingRestart [GOOD] >> TCmsTest::StateStorageLockedNodes ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::TestForceRestartModeDisconnects [GOOD] Test command err: 2025-07-08T11:58:34.751702Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:34.755385Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:34.755506Z node 1 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:34.755994Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:34.756104Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2025-07-08T11:58:34.756527Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:34.759053Z node 1 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:34.759227Z node 1 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:34.759293Z node 1 :CMS DEBUG: Using default config 2025-07-08T11:58:34.759327Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:34.759399Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2025-07-08T11:58:34.759614Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2025-07-08T11:58:34.759656Z node 1 :CMS DEBUG: Using default config. 
2025-07-08T11:58:34.759729Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2025-07-08T11:58:34.785955Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-07-08T11:58:34.797278Z node 1 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:34.797379Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:34.798841Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:34.798977Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-07-08T11:58:34.798983Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-07-08T11:58:34.798991Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-07-08T11:58:34.798995Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-07-08T11:58:34.799005Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-07-08T11:58:34.799031Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-07-08T11:58:34.801292Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } 
GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2025-07-08T11:58:34.825236Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:34.862848Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:34.862921Z node 1 :CMS DEBUG: Updated config: 
TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-07-08T11:58:34.905691Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:34.905739Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:34.905823Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:05:00Z 2025-07-08T11:58:34.906169Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300027512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 300027512 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 300027512 } Timestamp: 300027512 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300027512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 300027512 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 300027512 } Timestamp: 300027512 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300027512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 300027512 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 300027512 } Timestamp: 300027512 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300027512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 300027512 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 300027512 } Timestamp: 300027512 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300027512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 300027512 } Devices { Name: "pdisk-5-5" State: UP Timestamp: 300027512 } Timestamp: 300027512 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300027512 } Devices { Name: 
"vdisk-0-1-0-5-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 3000 ... ode 17 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-07-08T11:58:38.585019Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:38.585060Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:38.585085Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:05:00Z 2025-07-08T11:58:38.585403Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300029000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 300029000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 300029000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 300029000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 300029000 } Devices { Name: "pdisk-17-17" State: UP Timestamp: 300029000 } Timestamp: 300029000 NodeId: 17 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300029000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 300029000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 300029000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 300029000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 300029000 } Devices { Name: "pdisk-18-18" State: UP Timestamp: 300029000 } Timestamp: 300029000 NodeId: 18 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300029000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 300029000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 300029000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 300029000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 300029000 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 300029000 } Timestamp: 300029000 NodeId: 19 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300029000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 300029000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 300029000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 300029000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 300029000 } Devices { Name: "pdisk-20-20" State: UP Timestamp: 300029000 } Timestamp: 300029000 NodeId: 20 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300029000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 300029000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 300029000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 300029000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 300029000 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 300029000 } Timestamp: 300029000 
NodeId: 21 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300029000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 300029000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 300029000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 300029000 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 300029000 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 300029000 } Timestamp: 300029000 NodeId: 22 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300029000 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 300029000 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 300029000 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 300029000 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 300029000 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 300029000 } Timestamp: 300029000 NodeId: 23 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300029000 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 300029000 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 300029000 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 300029000 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 300029000 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 300029000 } Timestamp: 300029000 NodeId: 24 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 300029000 } } 2025-07-08T11:58:38.585465Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "17" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2025-07-08T11:58:38.585475Z node 17 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "17" Duration: 60000000 2025-07-08T11:58:38.585487Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 17, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-07-08T11:58:38.585527Z node 17 :CMS DEBUG: Ring: 0; State: Ok 2025-07-08T11:58:38.585531Z node 17 :CMS DEBUG: Ring: 1; State: Ok 2025-07-08T11:58:38.585534Z node 17 :CMS DEBUG: Ring: 2; State: Ok 2025-07-08T11:58:38.585538Z node 17 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:38.585554Z node 17 :CMS DEBUG: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2025-07-08T11:58:38.585563Z node 17 :CMS INFO: Adding lock for Host ::1:12001 (17) (permission user-p-1 until 1970-01-01T00:06:00Z) 2025-07-08T11:58:38.585590Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:38.585630Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:06:00.029000Z, action# Type: SHUTDOWN_HOST Host: "17" Duration: 60000000 2025-07-08T11:58:38.628539Z node 17 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:38.683234Z node 17 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:38.683327Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "17" Duration: 60000000 } 
PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: SHUTDOWN_HOST Host: "17" Duration: 60000000 } Deadline: 360029000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 17 InterconnectPort: 12001 } } } } 2025-07-08T11:58:38.683338Z node 17 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:08:00.029000Z 2025-07-08T11:58:38.707967Z node 17 :CMS INFO: Adding lock for Host ::1:12001 (17) (permission user-p-1 until 1970-01-01T00:06:00Z) 2025-07-08T11:58:38.708059Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:38.708142Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:05:00Z 2025-07-08T11:58:38.708286Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "18" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2025-07-08T11:58:38.708297Z node 17 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "18" Duration: 60000000 2025-07-08T11:58:38.708307Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 18, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-07-08T11:58:38.708349Z node 17 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:38.708365Z node 17 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user 2025-07-08T11:58:38.708371Z node 17 :CMS INFO: Adding lock for Host ::1:12002 (18) (permission user-p-2 until 1970-01-01T00:06:00Z) 2025-07-08T11:58:38.708381Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:38.708409Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:06:00.132512Z, action# Type: SHUTDOWN_HOST Host: "18" Duration: 60000000 2025-07-08T11:58:38.719705Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:38.719734Z node 17 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:38.719848Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "18" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-2" Permissions { Id: "user-p-2" Action { Type: SHUTDOWN_HOST Host: "18" Duration: 60000000 } Deadline: 360132512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 18 InterconnectPort: 12002 } } } } 2025-07-08T11:58:38.731348Z node 17 :CMS INFO: Adding lock for Host ::1:12002 (18) (permission user-p-2 until 1970-01-01T00:06:00Z) 2025-07-08T11:58:38.731369Z node 17 :CMS INFO: Adding lock for Host ::1:12001 (17) (permission user-p-1 until 1970-01-01T00:06:00Z) 2025-07-08T11:58:38.731427Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:38.731499Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:05:00Z 2025-07-08T11:58:38.731651Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2025-07-08T11:58:38.731659Z node 17 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 
2025-07-08T11:58:38.731670Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 19, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 2, down nodes: 0 2025-07-08T11:58:38.731717Z node 17 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:38.731732Z node 17 :CMS DEBUG: Accepting permission: id# user-p-3, requestId# user-r-3, owner# user 2025-07-08T11:58:38.731743Z node 17 :CMS INFO: Adding lock for Host ::1:12003 (19) (permission user-p-3 until 1970-01-01T00:06:00Z) 2025-07-08T11:58:38.731751Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:38.731775Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-3, validity# 1970-01-01T00:06:00.234024Z, action# Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 2025-07-08T11:58:38.742659Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:38.742683Z node 17 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:38.742758Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-3" Permissions { Id: "user-p-3" Action { Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 } Deadline: 360234024 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 19 InterconnectPort: 12003 } } } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::SysTabletsNode [GOOD] Test command err: 2025-07-08T11:58:34.691608Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:34.694667Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:34.694849Z node 1 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:34.695426Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:34.695494Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:34.696667Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2025-07-08T11:58:34.698702Z node 1 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:34.698881Z node 1 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:34.698968Z node 1 :CMS DEBUG: Using default config 2025-07-08T11:58:34.699004Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:34.700018Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2025-07-08T11:58:34.700055Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2025-07-08T11:58:34.700094Z node 1 :CMS DEBUG: Using default config. 
2025-07-08T11:58:34.700167Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2025-07-08T11:58:34.729006Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-07-08T11:58:34.739972Z node 1 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:34.740070Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:34.741471Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:34.741607Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-07-08T11:58:34.741614Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-07-08T11:58:34.741623Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-07-08T11:58:34.741627Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-07-08T11:58:34.741636Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-07-08T11:58:34.741662Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-07-08T11:58:34.743327Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { } } Success: true 2025-07-08T11:58:34.757290Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:34.810889Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:34.810952Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-07-08T11:58:34.849119Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:34.849171Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:34.849258Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:05:00Z 2025-07-08T11:58:34.849400Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300027000 } Timestamp: 300027000 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300027000 } Timestamp: 300027000 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300027000 } Timestamp: 300027000 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300027000 } Timestamp: 300027000 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300027000 } Timestamp: 300027000 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Timestamp: 300027000 } } 2025-07-08T11:58:34.849477Z node 1 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "1" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: 
false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-07-08T11:58:34.849486Z node 1 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "1" Services: "storage" Duration: 60000000 2025-07-08T11:58:34.849500Z node 1 :CMS DEBUG: [Nodes Counter] Checking Node: 1, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-07-08T11:58:34.849511Z node 1 :CMS DEBUG: Ring: 0; State: Ok 2025-07-08T11:58:34.849514Z node 1 :CMS DEBUG: Ring: 1; State: Ok 2025-07-08T11:58:34.849518Z node 1 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:34.849537Z node 1 :CMS DEBUG: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2025-07-08T11:58:34.849546Z node 1 :CMS INFO: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:06:00Z) 2025-07-08T11:58:34.849558Z node 1 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:34.849599Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:06:00.027000Z, action# Type: RESTART_SERVICES Host: "1" Services: "storage" Duration: 60000000 2025-07-08T11:58:34.889040Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:34.960162Z node 1 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:34.960270Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "1" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "1" Services: "storage" Duration: 60000000 } Deadline: 360027000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 1 InterconnectPort: 12001 } } } } 2025-07-08T11:58:34.960281Z node 1 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:08:00.027000Z 2025-07-08T11:58:35.014359Z node 1 :CMS INFO: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:06:00Z) 2025-07-08T11:58:35.014404Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:35.014420Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:35.014431Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:05:00Z 2025-07-08T11:58:35.014483Z node 1 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "3" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-07-08T11:58:35.014491Z node 1 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "3" Services: "storage" Duration: 60000000 2025-07-08T11:58:35.014501Z node 1 :CMS DEBUG: [Nodes Counter] Checking Node: 3, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-07-08T11:58:35.014512Z node 1 :CMS DEBUG: Ring: 0; State: Restart 2025-07-08T11:58:35.014516Z node 1 :CMS DEBUG: Ring: 1; State: Ok 2025-07-08T11:58:35.014523Z node 1 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Too many unavailable state storage rings. Restarting rings: 1. Temporary (for a 2 minutes) locked rings: 0. 
Maximum allowed number of unavailable rings for this mode: 1) 2025-07-08T11:58:35.014541Z node 1 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:35.026396Z node 1 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:35.026466Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "3" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Too many unavailable state storage rings. Restarting rings: 1. Temporary (for a 2 minutes) locked rings: 0. Maximum allowed number of unavailable rings for this mode: 1" } RequestId: "user-r-2" Deadline: 600128512 } 2025-07-08T11:58:36.279281Z node 6 :CMS DEBUG: TConsole::TTxInitScheme Execute 2025-07-08T11:58:36.281343Z node 6 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:36.281894Z node 6 :CMS DEBUG: TConsole::TTxInitScheme Complete 2025-07-08T11:58:36.282045Z node 6 :CMS DEBUG: TConsole::TTxLoadState Execute 2025-07-08T11:58:36.282081Z node 6 :CMS DEBUG: Using default config. 2025-07-08T11:58:36.282150Z node 6 :CMS DEBUG: TConsole::TTxLoadState Complete 2025-07-08T11:58:36.283138Z node 6 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:36.283195Z node 6 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:36.283620Z node 6 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:36.283710Z node 6 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:36.285811Z node 6 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:36.285872Z node 6 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:36.285920Z node 6 :CMS DEBUG: Using default config 2025-07-08T11:58:36.285945Z node 6 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:36.298976Z node 6 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-07-08T11:58:36.322740Z node 6 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:36.322816Z node 6 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:36.322838Z node 6 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:36.322940Z node 6 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-07-08T11:58:36.322944Z node 6 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-07-08T11:58:36.322950Z node 6 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-07-08T11:58:36.322953Z node 6 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-07-08T11:58:36.322966Z node 6 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-07-08T11:58:36.322978Z node 6 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-07-08T11:58:36.322999Z node 6 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:36.323108Z node 6 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { } } Success: true 2025-07-08T11:58:36.370726Z node 6 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:36.370809Z node 6 
:CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-07-08T11:58:36.371139Z node 6 :CMS INFO: OnTabletDead: 72057594037936128 2025-07-08T11:58:36.371149Z node 6 :CMS DEBUG: TCms::Cleanup 2025-07-08T11:58:36.374009Z node 6 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:36.37 ... 2025-07-08T11:58:38.887413Z node 16 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 2025-07-08T11:58:38.887426Z node 16 :CMS DEBUG: [Nodes Counter] Checking Node: 18, with state: Down, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 3 2025-07-08T11:58:38.887435Z node 16 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: FLAT_BS_CONTROLLER, on node: 18, with state: Down, locked nodes: 0, down nodes: 3 2025-07-08T11:58:38.887439Z node 16 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: FLAT_SCHEMESHARD, on node: 18, with state: Down, locked nodes: 0, down nodes: 3 2025-07-08T11:58:38.887443Z node 16 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: FLAT_TX_COORDINATOR, on node: 18, with state: Down, locked nodes: 0, down nodes: 3 2025-07-08T11:58:38.887447Z node 16 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: TX_MEDIATOR, on node: 18, with state: Down, locked nodes: 0, down nodes: 3 2025-07-08T11:58:38.887451Z node 16 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: TX_ALLOCATOR, on node: 18, with state: Down, locked nodes: 0, down nodes: 3 2025-07-08T11:58:38.887455Z node 16 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: CONSOLE, on node: 18, with state: Down, locked nodes: 0, down nodes: 3 2025-07-08T11:58:38.887458Z node 16 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: CMS, on node: 18, with state: Down, locked nodes: 0, down nodes: 3 2025-07-08T11:58:38.887462Z node 16 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: NODE_BROKER, on node: 18, with state: Down, locked nodes: 0, down nodes: 3 2025-07-08T11:58:38.887466Z node 16 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: TENANT_SLOT_BROKER, on node: 18, with state: Down, locked nodes: 0, down nodes: 3 2025-07-08T11:58:38.887470Z node 16 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:38.887530Z node 16 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } Deadline: 180240512 Extentions { Type: HostInfo Hosts { Name: "::1" State: DOWN NodeId: 18 InterconnectPort: 12003 } } } } 2025-07-08T11:58:38.905091Z node 16 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:38.916122Z node 16 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:38.916213Z node 16 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:38.916272Z node 16 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-07-08T11:58:38.916282Z node 
16 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 2025-07-08T11:58:38.916296Z node 16 :CMS DEBUG: [Nodes Counter] Checking Node: 19, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 3 2025-07-08T11:58:38.916304Z node 16 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: FLAT_BS_CONTROLLER, on node: 19, with state: Up, locked nodes: 0, down nodes: 3 2025-07-08T11:58:38.916314Z node 16 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '19': tablet 'FLAT_BS_CONTROLLER' has too many unavailable nodes. Locked: 0, down: 3, limit: 3) 2025-07-08T11:58:38.916349Z node 16 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'19\': tablet \'FLAT_BS_CONTROLLER\' has too many unavailable nodes. Locked: 0, down: 3, limit: 3" } Deadline: 420340512 } 2025-07-08T11:58:38.916481Z node 16 :CMS INFO: OnTabletDead: 72057594037936128 2025-07-08T11:58:38.916489Z node 16 :CMS DEBUG: TCms::Cleanup 2025-07-08T11:58:38.919279Z node 16 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:38.920114Z node 16 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:38.920132Z node 16 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:38.920511Z node 16 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:38.920631Z node 16 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:38.920676Z node 16 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:38.920748Z node 16 :CMS DEBUG: Loaded config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-07-08T11:58:38.920774Z node 16 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:38.920917Z node 16 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:38.920984Z node 16 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 } 2025-07-08T11:58:38.942845Z node 16 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:38.964229Z node 16 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:38.964316Z node 16 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:38.964382Z node 16 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "20" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2025-07-08T11:58:38.964395Z node 16 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "20" Services: "storage" Duration: 60000000 2025-07-08T11:58:38.964408Z node 16 :CMS DEBUG: [Nodes Counter] Checking Node: 20, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 4 2025-07-08T11:58:38.964418Z node 16 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: 
FLAT_BS_CONTROLLER, on node: 20, with state: Up, locked nodes: 0, down nodes: 4 2025-07-08T11:58:38.964422Z node 16 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: FLAT_SCHEMESHARD, on node: 20, with state: Up, locked nodes: 0, down nodes: 4 2025-07-08T11:58:38.964426Z node 16 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: FLAT_TX_COORDINATOR, on node: 20, with state: Up, locked nodes: 0, down nodes: 4 2025-07-08T11:58:38.964430Z node 16 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: TX_MEDIATOR, on node: 20, with state: Up, locked nodes: 0, down nodes: 4 2025-07-08T11:58:38.964434Z node 16 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: TX_ALLOCATOR, on node: 20, with state: Up, locked nodes: 0, down nodes: 4 2025-07-08T11:58:38.964438Z node 16 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: CONSOLE, on node: 20, with state: Up, locked nodes: 0, down nodes: 4 2025-07-08T11:58:38.964441Z node 16 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: CMS, on node: 20, with state: Up, locked nodes: 0, down nodes: 4 2025-07-08T11:58:38.964449Z node 16 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: NODE_BROKER, on node: 20, with state: Up, locked nodes: 0, down nodes: 4 2025-07-08T11:58:38.964453Z node 16 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: TENANT_SLOT_BROKER, on node: 20, with state: Up, locked nodes: 0, down nodes: 4 2025-07-08T11:58:38.964458Z node 16 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:38.964517Z node 16 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "20" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "20" Services: "storage" Duration: 60000000 } Deadline: 180446512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 20 InterconnectPort: 12005 } } } } 2025-07-08T11:58:38.964670Z node 16 :CMS INFO: OnTabletDead: 72057594037936128 2025-07-08T11:58:38.964679Z node 16 :CMS DEBUG: TCms::Cleanup 2025-07-08T11:58:38.968142Z node 16 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:38.968904Z node 16 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:38.968944Z node 16 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:38.969339Z node 16 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:38.969415Z node 16 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:38.969511Z node 16 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:38.969585Z node 16 :CMS DEBUG: Loaded config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-07-08T11:58:38.969609Z node 16 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:38.969662Z node 16 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:38.969698Z node 16 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 
ItemKinds: 26 } 2025-07-08T11:58:38.997835Z node 16 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:39.020347Z node 16 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:39.020436Z node 16 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:39.020496Z node 16 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "21" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2025-07-08T11:58:39.020506Z node 16 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "21" Services: "storage" Duration: 60000000 2025-07-08T11:58:39.020521Z node 16 :CMS DEBUG: [Nodes Counter] Checking Node: 21, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 5 2025-07-08T11:58:39.020530Z node 16 :CMS DEBUG: [Nodes Counter] Checking limits for sys tablet: FLAT_BS_CONTROLLER, on node: 21, with state: Up, locked nodes: 0, down nodes: 5 2025-07-08T11:58:39.020540Z node 16 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '21': tablet 'FLAT_BS_CONTROLLER' has too many unavailable nodes. Locked: 0, down: 5, limit: 5) 2025-07-08T11:58:39.020570Z node 16 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "21" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'21\': tablet \'FLAT_BS_CONTROLLER\' has too many unavailable nodes. Locked: 0, down: 5, limit: 5" } Deadline: 420552512 } >> TCmsTest::DisabledEvictVDisks [GOOD] >> TCmsTest::EmergencyDuringRollingRestart >> TCmsTest::WalleTasks [GOOD] >> TCmsTest::WalleRebootDownNode >> YdbIndexTable::MultiShardTableUniqAndNonUniqIndex [GOOD] >> YdbIndexTable::MultiShardTableTwoIndexes >> TCmsTest::PriorityRange [GOOD] >> TCmsTest::RequestRestartServicesNoUser [GOOD] >> TCmsTest::TestOutdatedState [GOOD] >> TCmsTest::TestLoadLog >> test.py::test[join-join_no_correlation_in_order_by--ForceBlocks] [GOOD] >> TMaintenanceApiTest::CompositeActionGroupSameStorageGroup >> test.py::test[join-join_no_correlation_in_order_by--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::RequestRestartServicesNoUser [GOOD] Test command err: 2025-07-08T11:58:34.955454Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2025-07-08T11:58:34.956579Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:34.958213Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:34.958282Z node 1 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:34.958678Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2025-07-08T11:58:34.959502Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:34.959567Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2025-07-08T11:58:34.959623Z node 1 :CMS DEBUG: Using default config. 
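[Editor's note] The permission checks above walk every system tablet (FLAT_BS_CONTROLLER, FLAT_SCHEMESHARD, TX_MEDIATOR, ...) and compare the count of locked plus down nodes against a per-tablet limit before granting a RESTART_SERVICES lock. Below is a minimal, self-contained sketch of that decision as it appears in the log; it is an illustrative model, not the actual NKikimr::NCms implementation, and the limits used here (3 under MODE_MAX_AVAILABILITY, 5 under MODE_KEEP_AVAILABLE in this run) are taken directly from the reason strings above, not from the source code.

#include <cstdio>
#include <string>

// Hypothetical model of the per-tablet node check seen in the log:
// one more node may be locked only while locked + down stays below the limit.
struct TCheckResult {
    bool Allowed;
    std::string Reason;
};

TCheckResult CheckSysTabletLimit(const std::string& tablet, int nodeId,
                                 int lockedNodes, int downNodes, int limit) {
    if (lockedNodes + downNodes >= limit) {
        char buf[256];
        std::snprintf(buf, sizeof(buf),
                      "Cannot lock node '%d': tablet '%s' has too many unavailable nodes. "
                      "Locked: %d, down: %d, limit: %d",
                      nodeId, tablet.c_str(), lockedNodes, downNodes, limit);
        return {false, buf};
    }
    return {true, "ALLOW"};
}

int main() {
    // Values taken from the log: node 19 under MODE_MAX_AVAILABILITY, 3 nodes already down.
    TCheckResult r = CheckSysTabletLimit("FLAT_BS_CONTROLLER", 19, /*locked*/ 0, /*down*/ 3, /*limit*/ 3);
    std::printf("%s\n", r.Allowed ? "ALLOW" : r.Reason.c_str());
    return 0;
}

With these inputs the sketch reproduces the DISALLOW_TEMP reason recorded for node 19, while the earlier check for node 18 (already Down, so not adding a new unavailable node) ends in ALLOW.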
2025-07-08T11:58:34.959715Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2025-07-08T11:58:34.959734Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:34.961065Z node 1 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:34.961103Z node 1 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:34.961134Z node 1 :CMS DEBUG: Using default config 2025-07-08T11:58:34.961165Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:34.990880Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-07-08T11:58:35.021376Z node 1 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:35.021472Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:35.022842Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:35.022993Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-07-08T11:58:35.023016Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-07-08T11:58:35.023025Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-07-08T11:58:35.023029Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-07-08T11:58:35.023059Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-07-08T11:58:35.023085Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-07-08T11:58:35.023113Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:35.024997Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 
3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId 
{ NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2025-07-08T11:58:35.074291Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:35.074348Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-07-08T11:58:35.120801Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:35.120842Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:35.120920Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:35.121279Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: 
"vdisk-2-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-5-5" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 1200 ... orage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-07-08T11:58:38.397347Z node 17 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 2025-07-08T11:58:38.397360Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 17, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-07-08T11:58:38.397398Z node 17 :CMS DEBUG: Ring: 0; State: Ok 2025-07-08T11:58:38.397401Z node 17 :CMS DEBUG: Ring: 1; State: Ok 2025-07-08T11:58:38.397404Z node 17 :CMS DEBUG: Ring: 2; State: Ok 2025-07-08T11:58:38.397408Z node 17 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:38.397418Z node 17 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 2025-07-08T11:58:38.397421Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 18, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-07-08T11:58:38.397438Z node 17 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Issue in affected group with id '0': too many unavailable vdisks. Locked: Host ::1:12001 (17) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. 
Down: ) 2025-07-08T11:58:38.397450Z node 17 :CMS DEBUG: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2025-07-08T11:58:38.397458Z node 17 :CMS INFO: Adding lock for Host ::1:12001 (17) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:38.397469Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:38.397509Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:03:00.029000Z, action# Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 2025-07-08T11:58:38.438140Z node 17 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:38.481160Z node 17 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:38.481269Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW_PARTIAL } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } Deadline: 180029000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 17 InterconnectPort: 12001 } } } } 2025-07-08T11:58:38.481280Z node 17 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:05:00.029000Z 2025-07-08T11:58:39.942087Z node 25 :CMS DEBUG: TConsole::TTxInitScheme Execute 2025-07-08T11:58:39.945561Z node 25 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:39.947531Z node 25 :CMS DEBUG: TConsole::TTxInitScheme Complete 2025-07-08T11:58:39.947615Z node 25 :CMS DEBUG: TConsole::TTxLoadState Execute 2025-07-08T11:58:39.947653Z node 25 :CMS DEBUG: Using default config. 
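[Editor's note] In the block-4-2 groups configured above, the second RESTART_SERVICES action of the same request is rejected because group 0 would lose a second vdisk while host 17 already holds a temporary lock, so the request is answered with ALLOW_PARTIAL. A rough model of that per-group rule follows, under the assumption that MODE_MAX_AVAILABILITY admits at most one unavailable vdisk per group at a time; the real CMS code also accounts for erasure parity and already-down disks, which this sketch ignores.

#include <cstdio>

// Illustrative model only: per-group vdisk availability check behind the
// "too many unavailable vdisks" rejection. maxUnavailable = 1 is an assumption
// for MODE_MAX_AVAILABILITY, taken from the behaviour visible in this log.
struct TGroupState {
    int LockedVDisks = 0;  // vdisks locked by earlier actions of this request
    int DownVDisks = 0;    // vdisks already down
};

bool MayLockOneMoreVDisk(const TGroupState& g, int maxUnavailable) {
    return g.LockedVDisks + g.DownVDisks + 1 <= maxUnavailable;
}

int main() {
    TGroupState group0{/*LockedVDisks*/ 1, /*DownVDisks*/ 0};  // host 17 already holds a lock
    std::printf("second lock in group 0: %s\n",
                MayLockOneMoreVDisk(group0, /*maxUnavailable*/ 1) ? "ALLOW" : "DISALLOW_TEMP");
    return 0;
}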
2025-07-08T11:58:39.947718Z node 25 :CMS DEBUG: TConsole::TTxLoadState Complete 2025-07-08T11:58:39.948128Z node 25 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:39.948195Z node 25 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:39.948478Z node 25 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:39.948609Z node 25 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:39.949983Z node 25 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:39.950012Z node 25 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:39.950045Z node 25 :CMS DEBUG: Using default config 2025-07-08T11:58:39.950068Z node 25 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:39.979009Z node 25 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-07-08T11:58:40.013267Z node 25 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:40.013395Z node 25 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:40.013428Z node 25 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:40.013528Z node 25 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-07-08T11:58:40.013535Z node 25 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-07-08T11:58:40.013544Z node 25 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-07-08T11:58:40.013548Z node 25 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-07-08T11:58:40.013562Z node 25 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:40.013617Z node 25 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-07-08T11:58:40.013634Z node 25 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-07-08T11:58:40.013919Z node 25 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 25 PDiskId: 25 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 26 PDiskId: 26 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 27 PDiskId: 27 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 28 PDiskId: 28 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 29 PDiskId: 29 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 30 PDiskId: 30 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 31 PDiskId: 31 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 32 PDiskId: 32 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 
} VSlot { VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1000 } VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1000 } VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1000 } VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1000 } VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1000 } VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1000 } VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1000 } VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1001 } VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1001 } VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1001 } VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1001 } VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1001 } VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1001 } VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1001 } VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1002 } VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1002 } 
VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1002 } VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1002 } VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1002 } VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1002 } VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1002 } VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1003 } VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1003 } VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1003 } VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1003 } VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1003 } VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1003 } VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1003 } VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1003 } } } } Success: true 2025-07-08T11:58:40.065345Z node 25 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:40.065414Z node 25 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-07-08T11:58:40.065613Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "" Actions { Type: RESTART_SERVICES Host: "::1" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: WRONG_REQUEST Reason: "Missing user in request" } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::PriorityRange [GOOD] Test command err: 2025-07-08T11:58:34.353691Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2025-07-08T11:58:34.354553Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:34.359372Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:34.359474Z node 1 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:34.359833Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2025-07-08T11:58:34.359858Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:34.360003Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2025-07-08T11:58:34.360062Z node 1 :CMS DEBUG: Using default config. 
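[Editor's note] Besides the availability checks, TEvPermissionRequest is validated up front: an empty User field is answered with WRONG_REQUEST ("Missing user in request") just above, and further below a Priority of -101 or 101 is rejected as out of range. A small sketch of such validation follows, assuming the accepted priority range is [-100, 100]; that bound is inferred from the two rejected values in this log, not from documentation.

#include <cstdio>
#include <string>

// Hypothetical request-validation helper mirroring the WRONG_REQUEST replies
// in this log. The [-100, 100] priority bounds are an inference, not a
// documented constant.
struct TPermissionRequest {
    std::string User;
    int Priority = 0;
};

std::string Validate(const TPermissionRequest& req) {
    if (req.User.empty()) {
        return "WRONG_REQUEST: Missing user in request";
    }
    if (req.Priority < -100 || req.Priority > 100) {
        return "WRONG_REQUEST: Priority value is out of range";
    }
    return "OK";
}

int main() {
    std::printf("%s\n", Validate({"", 0}).c_str());        // -> Missing user in request
    std::printf("%s\n", Validate({"user", -101}).c_str()); // -> Priority value is out of range
    std::printf("%s\n", Validate({"user", 50}).c_str());   // -> OK
    return 0;
}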
2025-07-08T11:58:34.365026Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2025-07-08T11:58:34.365114Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:34.367199Z node 1 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:34.367242Z node 1 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:34.367273Z node 1 :CMS DEBUG: Using default config 2025-07-08T11:58:34.367304Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:34.397929Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-07-08T11:58:34.420108Z node 1 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:34.420263Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:34.421680Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:34.421853Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-07-08T11:58:34.421861Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-07-08T11:58:34.421870Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-07-08T11:58:34.421874Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-07-08T11:58:34.421889Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:34.421939Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-07-08T11:58:34.421963Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-07-08T11:58:34.423825Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 
3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId 
{ NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2025-07-08T11:58:34.461339Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:34.461417Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-07-08T11:58:34.502495Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:34.502653Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:34.503014Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-1-1" State: DOWN Timestamp: 120026512 } Timestamp: 120026512 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-4-0" 
State: UP Timestamp: 120026512 } Devices { Name: "pdisk-5-5" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120026512 ... 025-07-08T11:58:38.254145Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:38.254173Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:38.277623Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17 2025-07-08T11:58:38.277653Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18 2025-07-08T11:58:38.277657Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19 2025-07-08T11:58:38.277660Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20 2025-07-08T11:58:38.277663Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21 2025-07-08T11:58:38.277666Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22 2025-07-08T11:58:38.277669Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23 2025-07-08T11:58:38.277672Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24 2025-07-08T11:58:38.277772Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:38.277835Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:02:15Z 2025-07-08T11:58:38.277900Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-07-08T11:58:38.277911Z node 17 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 2025-07-08T11:58:38.277925Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 17, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-07-08T11:58:38.277935Z node 17 :CMS DEBUG: Result: DISALLOW (reason: Affected group 0 has no parity parts) 2025-07-08T11:58:38.277964Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW Reason: "Affected group 0 has no parity parts" } } 2025-07-08T11:58:39.960567Z node 25 :CMS DEBUG: TConsole::TTxInitScheme Execute 2025-07-08T11:58:39.964661Z node 25 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:39.967074Z node 25 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:39.967155Z node 25 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:39.967503Z node 25 :CMS DEBUG: TConsole::TTxInitScheme Complete 2025-07-08T11:58:39.967529Z node 25 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:39.967643Z node 25 :CMS DEBUG: TConsole::TTxLoadState Execute 
2025-07-08T11:58:39.967684Z node 25 :CMS DEBUG: Using default config. 2025-07-08T11:58:39.967756Z node 25 :CMS DEBUG: TConsole::TTxLoadState Complete 2025-07-08T11:58:39.967777Z node 25 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:39.969235Z node 25 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:39.969285Z node 25 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:39.969317Z node 25 :CMS DEBUG: Using default config 2025-07-08T11:58:39.969337Z node 25 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:39.993213Z node 25 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-07-08T11:58:40.016833Z node 25 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:40.017057Z node 25 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:40.017095Z node 25 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:40.017151Z node 25 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:40.017227Z node 25 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-07-08T11:58:40.017234Z node 25 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-07-08T11:58:40.017244Z node 25 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-07-08T11:58:40.017249Z node 25 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-07-08T11:58:40.017273Z node 25 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-07-08T11:58:40.017287Z node 25 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-07-08T11:58:40.017600Z node 25 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 25 PDiskId: 25 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 26 PDiskId: 26 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 27 PDiskId: 27 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 28 PDiskId: 28 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 29 PDiskId: 29 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 30 PDiskId: 30 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 31 PDiskId: 31 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 32 PDiskId: 32 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 27 PDiskId: 27 
VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1000 } VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1000 } VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1000 } VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1000 } VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1000 } VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1000 } VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1000 } VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1001 } VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1001 } VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1001 } VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1001 } VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1001 } VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1001 } VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1001 } VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1002 } VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1002 } VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1002 } VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1002 } VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1002 } VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1002 } VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1002 } 
VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 25 PDiskId: 25 VSlotId: 1003 } VSlotId { NodeId: 26 PDiskId: 26 VSlotId: 1003 } VSlotId { NodeId: 27 PDiskId: 27 VSlotId: 1003 } VSlotId { NodeId: 28 PDiskId: 28 VSlotId: 1003 } VSlotId { NodeId: 29 PDiskId: 29 VSlotId: 1003 } VSlotId { NodeId: 30 PDiskId: 30 VSlotId: 1003 } VSlotId { NodeId: 31 PDiskId: 31 VSlotId: 1003 } VSlotId { NodeId: 32 PDiskId: 32 VSlotId: 1003 } } } } Success: true 2025-07-08T11:58:40.052679Z node 25 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:40.052741Z node 25 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-07-08T11:58:40.053052Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -101 }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: WRONG_REQUEST Reason: "Priority value is out of range" } } 2025-07-08T11:58:40.053136Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: 101 }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: WRONG_REQUEST Reason: "Priority value is out of range" } } >> TCmsTest::RequestReplaceDevices [GOOD] >> TCmsTest::WalleTasksDifferentPriorities [GOOD] >> TDowntimeTest::AddDowntime [GOOD] >> TCmsTest::RequestReplaceManyDevicesOnOneNode >> TExportToS3Tests::ShouldExcludeBackupTableFromStats [GOOD] >> TExportToS3Tests::ShouldCheckQuotas >> TCmsTenatsTest::TestTenantRatioLimit >> TCmsTest::WalleCleanupTest >> TCmsTest::StateRequest >> TCmsTest::TestLoadLog [GOOD] >> TCmsTest::TestLogOperationsRollback >> TCmsTest::TestKeepAvailableModeDisconnects >> TCmsTest::AllVDisksEvictionInRack [GOOD] >> TCmsTest::VDisksEviction [GOOD] >> test.py::test[aggregate-group_by_mul_gs_gs--Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_duo--Results] >> BasicUsage::WriteRead [GOOD] >> BasicUsage::ReadWithoutConsumerWithRestarts >> test.py::test[distinct-distinct_list_after_group-default.txt-Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> TFlatTableExecutor_PostponedScan::TestCancelRunningPostponedScan [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2025-07-08T11:58:19.190424Z 00000.006 DD| RESOURCE_BROKER: TResourceBrokerActor bootstrap 00000.007 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.009 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task 
gen0-table-101-tablet-1 (1 by [1:30:2062]) priority=200 resources={1, 0} 00000.009 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [1:30:2062]) to queue queue_background_compaction 00000.009 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (1 by [1:30:2062]) from queue queue_background_compaction 00000.009 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (1 by [1:30:2062]) to queue queue_background_compaction 00000.009 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 0.000000 to 3.906250 (insert task gen0-table-101-tablet-1 (1 by [1:30:2062])) 00000.009 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (1 by [1:30:2062]) (release resources {1, 0}) 00000.009 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 3.906250 to 0.000000 (remove task gen0-table-101-tablet-1 (1 by [1:30:2062])) 00000.010 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.010 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.010 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.010 II| FAKE_ENV: DS.1 gone, left {1347b, 10}, put {1347b, 10} 00000.010 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.010 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.010 II| FAKE_ENV: DS.0 gone, left {771b, 9}, put {791b, 10} 00000.010 II| FAKE_ENV: All BS storage groups are stopped 00000.010 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.010 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 23}, stopped 00000.000 II| FAKE_ENV: Born at 2025-07-08T11:58:19.206182Z 00000.002 DD| RESOURCE_BROKER: TResourceBrokerActor bootstrap 00000.003 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.003 II| FAKE_ENV: Starting storage for BS group 0 00000.003 II| FAKE_ENV: Starting storage for BS group 1 00000.003 II| FAKE_ENV: Starting storage for BS group 2 00000.003 II| FAKE_ENV: Starting storage for BS group 3 00000.004 DD| RESOURCE_BROKER: Submitted new background_compaction task bckg-block (987987987987 by [2:8:2055]) priority=0 resources={1, 0} 00000.004 DD| RESOURCE_BROKER: Assigning waiting task bckg-block (987987987987 by [2:8:2055]) to queue queue_background_compaction 00000.004 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task bckg-block (987987987987 by [2:8:2055]) from queue queue_background_compaction 00000.004 DD| RESOURCE_BROKER: Assigning in-fly task bckg-block (987987987987 by [2:8:2055]) to queue queue_background_compaction 00000.004 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 0.000000 to 23.437500 (insert task bckg-block (987987987987 by [2:8:2055])) 00000.004 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (1 by [2:30:2062]) priority=200 resources={1, 0} 00000.004 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [2:30:2062]) to queue queue_background_compaction 00000.004 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.004 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (1 by [2:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.004 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [2:30:2062]) to queue queue_compaction_gen0 00000.004 DD| RESOURCE_BROKER: 
Allocate resources {1, 0} for task gen0-table-101-tablet-1 (1 by [2:30:2062]) from queue queue_compaction_gen0 00000.004 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (1 by [2:30:2062]) to queue queue_compaction_gen0 00000.004 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 0.390625 (insert task gen0-table-101-tablet-1 (1 by [2:30:2062])) 00000.005 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (1 by [2:30:2062]) (release resources {1, 0}) 00000.005 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.390625 to 0.000000 (remove task gen0-table-101-tablet-1 (1 by [2:30:2062])) 00000.005 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.005 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.005 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.005 II| FAKE_ENV: DS.0 gone, left {1262b, 14}, put {1282b, 15} 00000.005 II| FAKE_ENV: DS.1 gone, left {1890b, 15}, put {1890b, 15} 00000.005 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.005 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.005 II| FAKE_ENV: All BS storage groups are stopped 00000.005 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.005 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 31}, stopped 00000.000 II| FAKE_ENV: Born at 2025-07-08T11:58:19.234145Z 00000.003 DD| RESOURCE_BROKER: TResourceBrokerActor bootstrap 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.007 DD| RESOURCE_BROKER: Submitted new background_compaction task bckg-block (987987987987 by [3:8:2055]) priority=0 resources={1, 0} 00000.007 DD| RESOURCE_BROKER: Assigning waiting task bckg-block (987987987987 by [3:8:2055]) to queue queue_background_compaction 00000.007 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task bckg-block (987987987987 by [3:8:2055]) from queue queue_background_compaction 00000.007 DD| RESOURCE_BROKER: Assigning in-fly task bckg-block (987987987987 by [3:8:2055]) to queue queue_background_compaction 00000.007 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 0.000000 to 23.437500 (insert task bckg-block (987987987987 by [3:8:2055])) 00000.007 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (1 by [3:30:2062]) priority=200 resources={1, 0} 00000.007 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [3:30:2062]) to queue queue_background_compaction 00000.007 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.007 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (1 by [3:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.007 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [3:30:2062]) to queue queue_compaction_gen0 00000.007 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (1 by [3:30:2062]) from queue queue_compaction_gen0 00000.007 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (1 by [3:30:2062]) to queue queue_compaction_gen0 00000.007 DD| 
RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 0.390625 (insert task gen0-table-101-tablet-1 (1 by [3:30:2062])) 00000.008 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (1 by [3:30:2062]) (release resources {1, 0}) 00000.008 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.390625 to 0.000000 (remove task gen0-table-101-tablet-1 (1 by [3:30:2062])) 00000.008 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (2 by [3:30:2062]) priority=200 resources={1, 0} 00000.008 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (2 by [3:30:2062]) to queue queue_background_compaction 00000.008 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.008 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (2 by [3:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.008 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (2 by [3:30:2062]) to queue queue_compaction_gen0 00000.008 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (2 by [3:30:2062]) from queue queue_compaction_gen0 00000.008 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (2 by [3:30:2062]) to queue queue_compaction_gen0 00000.008 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 0.371094 (insert task gen0-table-101-tablet-1 (2 by [3:30:2062])) 00000.008 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (2 by [3:30:2062]) (release resources {1, 0}) 00000.008 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.371094 to 0.000000 (remove task gen0-table-101-tablet-1 (2 by [3:30:2062])) 00000.009 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (3 by [3:30:2062]) priority=200 resources={1, 0} 00000.009 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (3 by [3:30:2062]) to queue queue_background_compaction 00000.009 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.009 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (3 by [3:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.009 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (3 by [3:30:2062]) to queue queue_compaction_gen0 00000.009 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (3 by [3:30:2062]) from queue queue_compaction_gen0 00000.009 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (3 by [3:30:2062]) to queue queue_compaction_gen0 00000.009 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 0.351562 (insert task gen0-table-101-tablet-1 (3 by [3:30:2062])) 00000.009 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (3 by [3:30:2062]) (release resources {1, 0}) 00000.009 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.351562 to 0.000000 (remove task gen0-table-101-tablet-1 (3 by [3:30:2062])) 00000.009 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (4 by [3:30:2062]) priority=200 resources={1, 0} 00000.009 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (4 by [3:30:2062]) to queue queue_background_compaction 00000.009 DD| RESOURCE_BROKER: Skip 
queue queue_background_compaction due to exceeded limits 00000.009 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (4 by [3:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.009 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (4 by [3:30:2062]) to queue queue_compaction_gen0 00000.009 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (4 by [3:30:2062]) from queue queue_compaction_gen0 00000.009 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (4 by [3:30:2062]) to queue queue_compaction_gen0 00000.009 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 0.332031 (insert task gen0-table-101-tablet-1 (4 by [3:30:2062])) 00000.009 DD| RESOU ... II| TABLET_EXECUTOR: Leader{1:2:122} started compaction 19 00000.008 II| TABLET_OPS_HOST: Scan{19 on 101, Compact{1.2.121, eph 10}} begin on TSubset{head 11, 1m 1p 0c} 00000.009 II| TABLET_OPS_HOST: Scan{19 on 101, Compact{1.2.121, eph 10}} end=0, 100r seen, TFwd{fetch=2.48KiB,saved=2.48KiB,usage=2.48KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.009 II| TABLET_EXECUTOR: Leader{1:2:122} Compact 19 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 121, product {1 parts epoch 11} done 00000.009 II| TABLET_EXECUTOR: Leader{1:2:133} starting compaction 00000.009 II| TABLET_EXECUTOR: Leader{1:2:134} starting Scan{21 on 101, Compact{1.2.133, eph 11}} 00000.009 II| TABLET_EXECUTOR: Leader{1:2:134} started compaction 21 00000.009 II| TABLET_OPS_HOST: Scan{21 on 101, Compact{1.2.133, eph 11}} begin on TSubset{head 12, 1m 1p 0c} 00000.009 II| TABLET_OPS_HOST: Scan{21 on 101, Compact{1.2.133, eph 11}} end=0, 110r seen, TFwd{fetch=2.75KiB,saved=2.75KiB,usage=2.75KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.009 II| TABLET_EXECUTOR: Leader{1:2:135} Compact 21 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 133, product {1 parts epoch 12} done 00000.009 II| TABLET_EXECUTOR: Leader{1:2:137} starting Scan{24 on 101, DummyScan} 00000.009 II| TABLET_OPS_HOST: Scan{24 on 101, DummyScan} begin on TSubset{head 12, 1m 1p 0c} 00000.010 II| TABLET_OPS_HOST: Scan{24 on 101, DummyScan} end=0, 111r seen, TFwd{fetch=3.03KiB,saved=3.03KiB,usage=3.03KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1} 00000.010 II| TABLET_EXECUTOR: Leader{1:2:137} suiciding, Waste{2:0, 3735b +(130, 26973b), 136 trc, -26973b acc} 00000.010 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.010 NN| TABLET_SAUSAGECACHE: Poison cache serviced 11 reqs hit {11 18700b} miss {0 0b} 00000.010 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.010 II| FAKE_ENV: DS.0 gone, left {13465b, 136}, put {13485b, 137} 00000.010 II| FAKE_ENV: DS.1 gone, left {31957b, 146}, put {31957b, 146} 00000.010 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.010 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.010 II| FAKE_ENV: All BS storage groups are stopped 00000.010 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.010 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 87}, stopped 00000.000 II| FAKE_ENV: Born at 2025-07-08T11:58:40.163273Z 00000.001 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.001 II| FAKE_ENV: Starting storage for BS group 0 00000.002 II| FAKE_ENV: Starting 
storage for BS group 1 00000.002 II| FAKE_ENV: Starting storage for BS group 2 00000.002 II| FAKE_ENV: Starting storage for BS group 3 00000.002 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.002 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.002 II| TABLET_EXECUTOR: Leader{1:2:13} starting compaction 00000.003 II| TABLET_EXECUTOR: Leader{1:2:14} starting Scan{1 on 101, Compact{1.2.13, eph 1}} 00000.003 II| TABLET_EXECUTOR: Leader{1:2:14} started compaction 1 00000.003 II| TABLET_OPS_HOST: Scan{1 on 101, Compact{1.2.13, eph 1}} begin on TSubset{head 2, 1m 0p 0c} 00000.003 II| TABLET_OPS_HOST: Scan{1 on 101, Compact{1.2.13, eph 1}} end=0, 10r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 0 of 0 ~1p 00000.003 II| TABLET_EXECUTOR: Leader{1:2:14} Compact 1 on TGenCompactionParams{101: gen 0 epoch +inf, 0 parts} step 13, product {1 parts epoch 2} done 00000.003 II| TABLET_EXECUTOR: Leader{1:2:25} starting compaction 00000.003 II| TABLET_EXECUTOR: Leader{1:2:26} starting Scan{3 on 101, Compact{1.2.25, eph 2}} 00000.003 II| TABLET_EXECUTOR: Leader{1:2:26} started compaction 3 00000.003 II| TABLET_OPS_HOST: Scan{3 on 101, Compact{1.2.25, eph 2}} begin on TSubset{head 3, 1m 1p 0c} 00000.003 II| TABLET_OPS_HOST: Scan{3 on 101, Compact{1.2.25, eph 2}} end=0, 20r seen, TFwd{fetch=300B,saved=300B,usage=300B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.004 II| TABLET_EXECUTOR: Leader{1:2:26} Compact 3 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 25, product {1 parts epoch 3} done 00000.004 II| TABLET_EXECUTOR: Leader{1:2:37} starting compaction 00000.004 II| TABLET_EXECUTOR: Leader{1:2:38} starting Scan{5 on 101, Compact{1.2.37, eph 3}} 00000.004 II| TABLET_EXECUTOR: Leader{1:2:38} started compaction 5 00000.004 II| TABLET_OPS_HOST: Scan{5 on 101, Compact{1.2.37, eph 3}} begin on TSubset{head 4, 1m 1p 0c} 00000.004 II| TABLET_OPS_HOST: Scan{5 on 101, Compact{1.2.37, eph 3}} end=0, 30r seen, TFwd{fetch=580B,saved=580B,usage=580B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.004 II| TABLET_EXECUTOR: Leader{1:2:39} Compact 5 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 37, product {1 parts epoch 4} done 00000.012 II| TABLET_EXECUTOR: Leader{1:2:49} starting compaction 00000.012 II| TABLET_EXECUTOR: Leader{1:2:50} starting Scan{7 on 101, Compact{1.2.49, eph 4}} 00000.012 II| TABLET_EXECUTOR: Leader{1:2:50} started compaction 7 00000.012 II| TABLET_OPS_HOST: Scan{7 on 101, Compact{1.2.49, eph 4}} begin on TSubset{head 5, 1m 1p 0c} 00000.012 II| TABLET_OPS_HOST: Scan{7 on 101, Compact{1.2.49, eph 4}} end=0, 40r seen, TFwd{fetch=860B,saved=860B,usage=860B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.012 II| TABLET_EXECUTOR: Leader{1:2:51} Compact 7 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 49, product {1 parts epoch 5} done 00000.013 II| TABLET_EXECUTOR: Leader{1:2:61} starting compaction 00000.013 II| TABLET_EXECUTOR: Leader{1:2:62} starting Scan{9 on 101, Compact{1.2.61, eph 5}} 00000.013 II| TABLET_EXECUTOR: Leader{1:2:62} started compaction 9 00000.013 II| TABLET_OPS_HOST: Scan{9 on 101, Compact{1.2.61, eph 5}} begin on TSubset{head 6, 1m 1p 0c} 00000.013 II| TABLET_OPS_HOST: Scan{9 on 101, Compact{1.2.61, eph 5}} end=0, 50r seen, 
TFwd{fetch=1.11KiB,saved=1.11KiB,usage=1.11KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.013 II| TABLET_EXECUTOR: Leader{1:2:62} Compact 9 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 61, product {1 parts epoch 6} done 00000.013 II| TABLET_EXECUTOR: Leader{1:2:73} starting compaction 00000.014 II| TABLET_EXECUTOR: Leader{1:2:74} starting Scan{11 on 101, Compact{1.2.73, eph 6}} 00000.014 II| TABLET_EXECUTOR: Leader{1:2:74} started compaction 11 00000.014 II| TABLET_OPS_HOST: Scan{11 on 101, Compact{1.2.73, eph 6}} begin on TSubset{head 7, 1m 1p 0c} 00000.014 II| TABLET_OPS_HOST: Scan{11 on 101, Compact{1.2.73, eph 6}} end=0, 60r seen, TFwd{fetch=1.39KiB,saved=1.39KiB,usage=1.39KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.014 II| TABLET_EXECUTOR: Leader{1:2:75} Compact 11 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 73, product {1 parts epoch 7} done 00000.014 II| TABLET_EXECUTOR: Leader{1:2:85} starting compaction 00000.014 II| TABLET_EXECUTOR: Leader{1:2:86} starting Scan{13 on 101, Compact{1.2.85, eph 7}} 00000.014 II| TABLET_EXECUTOR: Leader{1:2:86} started compaction 13 00000.014 II| TABLET_OPS_HOST: Scan{13 on 101, Compact{1.2.85, eph 7}} begin on TSubset{head 8, 1m 1p 0c} 00000.014 II| TABLET_OPS_HOST: Scan{13 on 101, Compact{1.2.85, eph 7}} end=0, 70r seen, TFwd{fetch=1.66KiB,saved=1.66KiB,usage=1.66KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.014 II| TABLET_EXECUTOR: Leader{1:2:87} Compact 13 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 85, product {1 parts epoch 8} done 00000.015 II| TABLET_EXECUTOR: Leader{1:2:97} starting compaction 00000.015 II| TABLET_EXECUTOR: Leader{1:2:98} starting Scan{15 on 101, Compact{1.2.97, eph 8}} 00000.015 II| TABLET_EXECUTOR: Leader{1:2:98} started compaction 15 00000.015 II| TABLET_OPS_HOST: Scan{15 on 101, Compact{1.2.97, eph 8}} begin on TSubset{head 9, 1m 1p 0c} 00000.015 II| TABLET_OPS_HOST: Scan{15 on 101, Compact{1.2.97, eph 8}} end=0, 80r seen, TFwd{fetch=1.93KiB,saved=1.93KiB,usage=1.93KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.015 II| TABLET_EXECUTOR: Leader{1:2:98} Compact 15 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 97, product {1 parts epoch 9} done 00000.023 II| TABLET_EXECUTOR: Leader{1:2:109} starting compaction 00000.023 II| TABLET_EXECUTOR: Leader{1:2:110} starting Scan{17 on 101, Compact{1.2.109, eph 9}} 00000.023 II| TABLET_EXECUTOR: Leader{1:2:110} started compaction 17 00000.023 II| TABLET_OPS_HOST: Scan{17 on 101, Compact{1.2.109, eph 9}} begin on TSubset{head 10, 1m 1p 0c} 00000.025 II| TABLET_OPS_HOST: Scan{17 on 101, Compact{1.2.109, eph 9}} end=0, 90r seen, TFwd{fetch=2.21KiB,saved=2.21KiB,usage=2.21KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.025 II| TABLET_EXECUTOR: Leader{1:2:111} Compact 17 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 109, product {1 parts epoch 10} done 00000.026 II| TABLET_EXECUTOR: Leader{1:2:121} starting compaction 00000.026 II| TABLET_EXECUTOR: Leader{1:2:122} starting Scan{19 on 101, Compact{1.2.121, eph 10}} 00000.026 II| TABLET_EXECUTOR: Leader{1:2:122} started compaction 19 00000.026 II| TABLET_OPS_HOST: Scan{19 on 101, Compact{1.2.121, eph 10}} begin on TSubset{head 11, 1m 1p 0c} 00000.035 II| TABLET_OPS_HOST: Scan{19 on 101, Compact{1.2.121, eph 10}} 
end=0, 100r seen, TFwd{fetch=2.48KiB,saved=2.48KiB,usage=2.48KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.035 II| TABLET_EXECUTOR: Leader{1:2:123} Compact 19 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 121, product {1 parts epoch 11} done 00000.035 II| TABLET_EXECUTOR: Leader{1:2:133} starting compaction 00000.035 II| TABLET_EXECUTOR: Leader{1:2:134} starting Scan{21 on 101, Compact{1.2.133, eph 11}} 00000.035 II| TABLET_EXECUTOR: Leader{1:2:134} started compaction 21 00000.035 II| TABLET_OPS_HOST: Scan{21 on 101, Compact{1.2.133, eph 11}} begin on TSubset{head 12, 1m 1p 0c} 00000.036 II| TABLET_OPS_HOST: Scan{21 on 101, Compact{1.2.133, eph 11}} end=0, 110r seen, TFwd{fetch=2.75KiB,saved=2.75KiB,usage=2.75KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.036 II| TABLET_EXECUTOR: Leader{1:2:134} Compact 21 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 133, product {1 parts epoch 12} done 00000.036 II| TABLET_EXECUTOR: Leader{1:2:137} starting Scan{24 on 101, DummyScan} 00000.036 II| TABLET_OPS_HOST: Scan{24 on 101, DummyScan} begin on TSubset{head 12, 1m 1p 0c} 00000.036 II| TABLET_OPS_HOST: Scan{24 on 101, DummyScan} end=2, 0r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=0} 00000.036 II| TABLET_EXECUTOR: Leader{1:2:137} suiciding, Waste{2:0, 3735b +(130, 26973b), 136 trc, -26973b acc} 00000.037 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.037 NN| TABLET_SAUSAGECACHE: Poison cache serviced 10 reqs hit {10 15600b} miss {0 0b} 00000.037 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.037 II| FAKE_ENV: DS.0 gone, left {13467b, 136}, put {13487b, 137} 00000.037 II| FAKE_ENV: DS.1 gone, left {31957b, 146}, put {31957b, 146} 00000.037 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.037 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.037 II| FAKE_ENV: All BS storage groups are stopped 00000.037 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.037 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 87}, stopped >> TCmsTenatsTest::TestNoneTenantPolicy [GOOD] >> TCmsTenatsTest::TestDefaultTenantPolicyWithSingleTenantHost >> test.py::test[pg-tpch-q17-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q22-default.txt-Results] >> test.py::test[expr-non_persistable_insert_into_fail--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TDowntimeTest::AddDowntime [GOOD] Test command err: 2025-07-08T11:58:35.366252Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2025-07-08T11:58:35.367125Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:35.369803Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:35.369875Z node 1 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:35.370312Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2025-07-08T11:58:35.370336Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:35.370420Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2025-07-08T11:58:35.370462Z node 1 :CMS DEBUG: Using default config. 
2025-07-08T11:58:35.370539Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2025-07-08T11:58:35.370552Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:35.371721Z node 1 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:35.371738Z node 1 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:35.371767Z node 1 :CMS DEBUG: Using default config 2025-07-08T11:58:35.371790Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:35.392366Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-07-08T11:58:35.436288Z node 1 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:35.436346Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:35.437341Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:35.437434Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-07-08T11:58:35.437440Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-07-08T11:58:35.437448Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-07-08T11:58:35.437451Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-07-08T11:58:35.437493Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-07-08T11:58:35.437514Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-07-08T11:58:35.437537Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:35.441503Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/pdisk.data" Guid: 1 
DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 36 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 37 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 38 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 39 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 40 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 41 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 42 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 43 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 44 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 45 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 46 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 47 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 48 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 49 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 50 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 51 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 52 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 53 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 54 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 55 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 56 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 57 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 58 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 59 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 60 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 61 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 62 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 63 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 16 PDiskId: 64 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 16 PDiskId: 65 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 16 PDiskId: 66 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 16 PDiskId: 67 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 17 PDiskId: 68 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 17 PDiskId: 69 Path: "/pdisk.data" Guid: 1 
DriveStatus: ACTIVE } PDisk { NodeId: 17 PDiskId: 70 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 17 PDiskId: 71 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 18 PDiskId: 72 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 18 PDiskId: 73 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 18 PDiskId: 74 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 18 PDiskId: 75 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 19 PDiskId: 76 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 19 PDiskId: 77 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 19 PDiskId: 78 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 19 PDiskId: 79 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 20 PDiskId: 80 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 20 PDiskId: 81 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 20 PDiskId: 82 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 20 PDiskId: 83 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 21 PDiskId: 84 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 21 PDiskId: 85 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 21 PDiskId: 86 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 21 PDiskId: 87 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 22 PDiskId: 88 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 22 PDiskId: 89 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 22 PDiskId: 90 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 22 PDiskId: 91 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 23 PDiskId: 92 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 23 PDiskId: 93 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 23 PDiskId: 94 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 23 PDiskId: 95 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 24 PDiskId: 96 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 24 PDiskId: 97 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 24 PDiskId: 98 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 24 PDiskId: 99 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 
GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 P ... Id: Wall-E-r-3 Owner: Permissions: [] HasSingleCompositeActionGroup: 0 } }, response# NKikimr::NCms::TEvCms::TEvWalleTaskStored { TaskId: task-2 } 2025-07-08T11:58:40.499177Z node 25 :CMS NOTICE: [AuditLog] [Wall-E adapter] Reply: request# NKikimr::NCms::TEvCms::TEvWalleCreateTaskRequest { TaskId: "task-2" Type: "automated" Issuer: "UT" Action: "reboot" Hosts: "26" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvWalleCreateTaskResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'26\': node state: \'Locked\'" } TaskId: "task-2" Hosts: "26" } 2025-07-08T11:58:40.499278Z node 25 :CMS INFO: User user is done with permissions user-p-2 2025-07-08T11:58:40.499286Z node 25 :CMS DEBUG: Resulting status: OK 2025-07-08T11:58:40.499295Z node 25 :CMS DEBUG: TTxRemovePermissions Execute 2025-07-08T11:58:40.499314Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-2, reason# explicit remove 2025-07-08T11:58:40.510067Z node 25 :CMS DEBUG: TTxRemovePermissions Complete 2025-07-08T11:58:40.510145Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2025-07-08T11:58:40.510272Z node 25 :CMS INFO: Processing Wall-E request: TaskId: "task-1" 2025-07-08T11:58:40.521698Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:40.521734Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:40.521748Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:40.521897Z node 25 :CMS INFO: Check request: User: "Wall-E" Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 18446744073709551615 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request, Host ::1:12002 (26) has planned shutdown (permission user-p-2 owned by user). Down: " } } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: 50 2025-07-08T11:58:40.521910Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "27" Duration: 18446744073709551615 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request, Host ::1:12002 (26) has planned shutdown (permission user-p-2 owned by user). Down: " } 2025-07-08T11:58:40.521920Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-07-08T11:58:40.521947Z node 25 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Issue in affected group with id '0': too many unavailable vdisks. 
Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request, Host ::1:12002 (26) has scheduled action Wall-E-r-3 owned by Wall-E (priority 20 vs 50). Down: ) 2025-07-08T11:58:40.521967Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:40.522012Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# Wall-E-r-2, owner# Wall-E, order# 2, priority# 50, body# User: "Wall-E" Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 18446744073709551615 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request, Host ::1:12002 (26) has scheduled action Wall-E-r-3 owned by Wall-E (priority 20 vs 50). Down: " } } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: 50 2025-07-08T11:58:40.537393Z node 25 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:40.537463Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "Wall-E" RequestId: "Wall-E-r-2" }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request, Host ::1:12002 (26) has scheduled action Wall-E-r-3 owned by Wall-E (priority 20 vs 50). Down: " } RequestId: "Wall-E-r-2" Deadline: 420740608 } 2025-07-08T11:58:40.537517Z node 25 :CMS NOTICE: [AuditLog] [Wall-E adapter] Reply: request# NKikimr::NCms::TEvCms::TEvWalleCheckTaskRequest { TaskId: "task-1" }, response# NKikimr::NCms::TEvCms::TEvWalleCheckTaskResponse { Status { Code: DISALLOW_TEMP Reason: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request, Host ::1:12002 (26) has scheduled action Wall-E-r-3 owned by Wall-E (priority 20 vs 50). 
Down: " } Task { TaskId: "task-1" Hosts: "27" } } 2025-07-08T11:58:40.537634Z node 25 :CMS INFO: Processing Wall-E request: TaskId: "task-2" 2025-07-08T11:58:40.554233Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:40.554270Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:40.554285Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:40.554430Z node 25 :CMS INFO: Check request: User: "Wall-E" Actions { Type: REBOOT_HOST Host: "26" Duration: 18446744073709551615 Issue { Type: GENERIC Message: "Cannot lock node \'26\': node state: \'Locked\'" } } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: 20 2025-07-08T11:58:40.554443Z node 25 :CMS DEBUG: Checking action: Type: REBOOT_HOST Host: "26" Duration: 18446744073709551615 Issue { Type: GENERIC Message: "Cannot lock node \'26\': node state: \'Locked\'" } 2025-07-08T11:58:40.554453Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-07-08T11:58:40.554487Z node 25 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:40.554507Z node 25 :CMS DEBUG: Accepting permission: id# Wall-E-p-3, requestId# Wall-E-r-3, owner# Wall-E 2025-07-08T11:58:40.554514Z node 25 :CMS INFO: Adding lock for Host ::1:12002 (26) (permission Wall-E-p-3 until 586524-01-19T08:01:49Z) 2025-07-08T11:58:40.554526Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:40.554558Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# Wall-E-p-3, validity# 586524-01-19T08:01:49.551615Z, action# Type: REBOOT_HOST Host: "26" Duration: 18446744073709551615 2025-07-08T11:58:40.554568Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# Wall-E-r-3, owner# Wall-E 2025-07-08T11:58:40.566377Z node 25 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:40.566467Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "Wall-E" RequestId: "Wall-E-r-3" }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "Wall-E-p-3" Action { Type: REBOOT_HOST Host: "26" Duration: 18446744073709551615 } Deadline: 18446744073709551615 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 26 InterconnectPort: 12002 } } } } 2025-07-08T11:58:40.566516Z node 25 :CMS NOTICE: [AuditLog] [Wall-E adapter] Reply: request# NKikimr::NCms::TEvCms::TEvWalleCheckTaskRequest { TaskId: "task-2" }, response# NKikimr::NCms::TEvCms::TEvWalleCheckTaskResponse { Status { Code: ALLOW } Task { TaskId: "task-2" Hosts: "26" } } 2025-07-08T11:58:40.566628Z node 25 :CMS INFO: Processing Wall-E request: TaskId: "task-2" 2025-07-08T11:58:40.566649Z node 25 :CMS DEBUG: TTxRemovePermissions Execute 2025-07-08T11:58:40.566672Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# Wall-E-p-3, reason# explicit remove 2025-07-08T11:58:40.581484Z node 25 :CMS DEBUG: TTxRemovePermissions Complete 2025-07-08T11:58:40.581522Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvRemoveWalleTask { TaskId: task-2 }, response# NKikimr::NCms::TEvCms::TEvWalleTaskRemoved { TaskId: task-2 } 2025-07-08T11:58:40.581532Z node 25 :CMS DEBUG: Found empty task task-2 2025-07-08T11:58:40.581595Z node 25 :CMS NOTICE: [AuditLog] [Wall-E adapter] Reply: request# NKikimr::NCms::TEvCms::TEvWalleRemoveTaskRequest { TaskId: "task-2" }, 
response# NKikimr::NCms::TEvCms::TEvWalleRemoveTaskResponse { Status { Code: OK } } 2025-07-08T11:58:40.581611Z node 25 :CMS DEBUG: TTxRemoveTask Execute 2025-07-08T11:58:40.581635Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove task: id# task-2 2025-07-08T11:58:40.581775Z node 25 :CMS INFO: Processing Wall-E request: TaskId: "task-1" 2025-07-08T11:58:40.593903Z node 25 :CMS DEBUG: TTxRemoveTask Complete 2025-07-08T11:58:40.606753Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:40.606789Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:40.606803Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:40.606965Z node 25 :CMS INFO: Check request: User: "Wall-E" Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 18446744073709551615 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request, Host ::1:12002 (26) has scheduled action Wall-E-r-3 owned by Wall-E (priority 20 vs 50). Down: " } } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: 50 2025-07-08T11:58:40.606979Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "27" Duration: 18446744073709551615 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request, Host ::1:12002 (26) has scheduled action Wall-E-r-3 owned by Wall-E (priority 20 vs 50). Down: " } 2025-07-08T11:58:40.606989Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-07-08T11:58:40.607024Z node 25 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:40.607043Z node 25 :CMS DEBUG: Accepting permission: id# Wall-E-p-4, requestId# Wall-E-r-2, owner# Wall-E 2025-07-08T11:58:40.607051Z node 25 :CMS INFO: Adding lock for Host ::1:12003 (27) (permission Wall-E-p-4 until 586524-01-19T08:01:49Z) 2025-07-08T11:58:40.607061Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:40.607094Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# Wall-E-p-4, validity# 586524-01-19T08:01:49.551615Z, action# Type: SHUTDOWN_HOST Host: "27" Duration: 18446744073709551615 2025-07-08T11:58:40.607103Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# Wall-E-r-2, owner# Wall-E 2025-07-08T11:58:40.619310Z node 25 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:40.619393Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "Wall-E" RequestId: "Wall-E-r-2" }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "Wall-E-p-4" Action { Type: SHUTDOWN_HOST Host: "27" Duration: 18446744073709551615 } Deadline: 18446744073709551615 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 27 InterconnectPort: 12003 } } } } 2025-07-08T11:58:40.619443Z node 25 :CMS NOTICE: [AuditLog] [Wall-E adapter] Reply: request# NKikimr::NCms::TEvCms::TEvWalleCheckTaskRequest { TaskId: "task-1" }, response# NKikimr::NCms::TEvCms::TEvWalleCheckTaskResponse { Status { Code: ALLOW } Task { TaskId: "task-1" Hosts: "27" } } >> TCmsTest::ScheduledEmergencyDuringRollingRestart [GOOD] >> TCmsTest::SamePriorityRequest >> TCmsTest::WalleRebootDownNode [GOOD] >> 
TCmsTest::WalleRequestDuringRollingRestart >> TCmsTest::StateStorageLockedNodes [GOOD] >> TCmsTenatsTest::TestClusterRatioLimit [GOOD] >> TCmsTenatsTest::TestClusterRatioLimitForceRestartMode ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::VDisksEviction [GOOD] Test command err: 2025-07-08T11:58:36.461022Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2025-07-08T11:58:36.469206Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:36.471856Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:36.471941Z node 1 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:36.472296Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:36.472419Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:36.481160Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2025-07-08T11:58:36.481264Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2025-07-08T11:58:36.481333Z node 1 :CMS DEBUG: Using default config. 2025-07-08T11:58:36.481447Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2025-07-08T11:58:36.483225Z node 1 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:36.483263Z node 1 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:36.483300Z node 1 :CMS DEBUG: Using default config 2025-07-08T11:58:36.483330Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:36.515003Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-07-08T11:58:36.537362Z node 1 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:36.537537Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:36.539082Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:36.539225Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-07-08T11:58:36.539234Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-07-08T11:58:36.539243Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-07-08T11:58:36.539247Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-07-08T11:58:36.539261Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:36.539311Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-07-08T11:58:36.539336Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-07-08T11:58:36.543109Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 9 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 10 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 11 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 12 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 13 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 14 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 15 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 16 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 17 Path: 
"/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 18 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 19 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 20 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 21 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 22 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 23 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 24 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 25 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 26 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 27 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 28 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 29 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 30 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 31 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 32 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 33 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 34 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 35 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 36 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 37 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 38 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 39 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 40 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 41 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 42 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 43 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 44 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 45 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 46 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 47 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 48 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 49 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 50 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 51 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 52 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 53 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 54 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 55 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 56 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 57 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 58 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 59 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 60 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 61 Path: "/pdisk.data" Guid: 1 DriveStatus: 
ACTIVE } PDisk { NodeId: 6 PDiskId: 62 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 63 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 64 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 65 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 66 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 67 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 68 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 69 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 70 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 71 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 72 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 73 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 74 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 75 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 76 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 77 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 78 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 79 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 80 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 81 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 82 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 83 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 84 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 85 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 86 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 87 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 88 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 89 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 9 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 9 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 9 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 9 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 10 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 10 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 10 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 10 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 11 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 11 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 11 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 11 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 12 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 12 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 12 VSlotId: 1002 } GroupId: 14 GroupGeneration: 
1 } VSlot { VSlotId { NodeId: 1 PDiskId: 12 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 13 VSlotId: 1000 } GroupId: 16 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 13 VSlotId: 1001 } GroupId: 17 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 13 VSlotId: 1002 } GroupId: 18 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 13 VSlotId: 1003 } GroupId: 19 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 14 VSlotId: 1000 } GroupId: 20 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 14 VSlotId: 1001 } GroupId: 21 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 14 VSlotId: 1002 } GroupId: 22 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 14 VSlotId: 1003 } GroupId: 23 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 15 VSlotId: 1000 } GroupId: 24 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 15 VSlotId: 1001 } GroupId: 25 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 15 VSlotId: 1002 } GroupId: 26 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 15 VSlotId: 1003 } GroupId: 27 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 16 VSlotId: 1000 } GroupId: 28 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 16 VSlotId: 1001 } GroupId: 29 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 16 VSlotId: 1002 } GroupId: 30 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 16 VSlotId: 1003 } GroupId: 31 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 17 VSlotId: 1000 } GroupId: 32 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 17 VSlotId: 1001 } Grou ... ices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120540560 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120540560 } Timestamp: 120540560 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120540560 } } 2025-07-08T11:58:41.034952Z node 18 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120540560 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120540560 } Devices { Name: "pdisk-18-18" State: UP Timestamp: 120540560 } Timestamp: 120540560 NodeId: 18 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120540560 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120540560 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 120540560 } Timestamp: 120540560 NodeId: 19 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" 
Timestamp: 120540560 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120540560 } Devices { Name: "pdisk-20-20" State: UP Timestamp: 120540560 } Timestamp: 120540560 NodeId: 20 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120540560 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120540560 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 120540560 } Timestamp: 120540560 NodeId: 21 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120540560 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120540560 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 120540560 } Timestamp: 120540560 NodeId: 22 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120540560 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120540560 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 120540560 } Timestamp: 120540560 NodeId: 23 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120540560 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120540560 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 120540560 } Timestamp: 120540560 NodeId: 24 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120540560 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120540560 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120540560 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120540560 } Timestamp: 120540560 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120540560 } 2025-07-08T11:58:41.035012Z node 18 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 } 
PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2025-07-08T11:58:41.035020Z node 18 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 2025-07-08T11:58:41.035028Z node 18 :CMS DEBUG: Result: DISALLOW_TEMP (reason: VDisks eviction from host 18 has not yet been completed) 2025-07-08T11:58:41.035051Z node 18 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:41.035102Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-3, owner# user, order# 3, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 18 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-07-08T11:58:41.035107Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Add host marker: host# 18, marker# MARKER_DISK_FAULTY 2025-07-08T11:58:41.035165Z node 18 :CMS DEBUG: [Sentinel] [Main] Config was updated in 0.100000s 2025-07-08T11:58:41.035175Z node 18 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-07-08T11:58:41.035190Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 18, wbId# [18:8388350642965737326:1634689637] 2025-07-08T11:58:41.035196Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 19, wbId# [19:8388350642965737326:1634689637] 2025-07-08T11:58:41.035201Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 20, wbId# [20:8388350642965737326:1634689637] 2025-07-08T11:58:41.035205Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 21, wbId# [21:8388350642965737326:1634689637] 2025-07-08T11:58:41.035210Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 22, wbId# [22:8388350642965737326:1634689637] 2025-07-08T11:58:41.035215Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 23, wbId# [23:8388350642965737326:1634689637] 2025-07-08T11:58:41.035219Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 24, wbId# [24:8388350642965737326:1634689637] 2025-07-08T11:58:41.035224Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 25, wbId# [25:8388350642965737326:1634689637] 2025-07-08T11:58:41.035281Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 18, response# PDiskStateInfo { PDiskId: 18 CreateTime: 120442072 ChangeTime: 120442072 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120540 2025-07-08T11:58:41.035386Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 23, response# PDiskStateInfo { PDiskId: 23 CreateTime: 120442072 ChangeTime: 120442072 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120540 2025-07-08T11:58:41.035409Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 24, response# PDiskStateInfo { PDiskId: 24 CreateTime: 120442072 ChangeTime: 120442072 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120540 2025-07-08T11:58:41.035420Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 25, response# 
PDiskStateInfo { PDiskId: 25 CreateTime: 120442072 ChangeTime: 120442072 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120540 2025-07-08T11:58:41.035430Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 19, response# PDiskStateInfo { PDiskId: 19 CreateTime: 120442072 ChangeTime: 120442072 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120540 2025-07-08T11:58:41.035439Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 20, response# PDiskStateInfo { PDiskId: 20 CreateTime: 120442072 ChangeTime: 120442072 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120540 2025-07-08T11:58:41.035447Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 21, response# PDiskStateInfo { PDiskId: 21 CreateTime: 120442072 ChangeTime: 120442072 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120540 2025-07-08T11:58:41.035456Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 22, response# PDiskStateInfo { PDiskId: 22 CreateTime: 120442072 ChangeTime: 120442072 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120540 2025-07-08T11:58:41.035463Z node 18 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-07-08T11:58:41.051346Z node 18 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:41.051423Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "VDisks eviction from host 18 has not yet been completed" } RequestId: "user-r-3" Deadline: 0 } 2025-07-08T11:58:41.051565Z node 18 :CMS INFO: User user removes request user-r-3 2025-07-08T11:58:41.051575Z node 18 :CMS DEBUG: Resulting status: OK 2025-07-08T11:58:41.051599Z node 18 :CMS DEBUG: TTxRemoveRequest Execute 2025-07-08T11:58:41.051605Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reset host markers: host# 18 2025-07-08T11:58:41.051638Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-3, reason# explicit remove 2025-07-08T11:58:41.069667Z node 18 :CMS DEBUG: TTxRemoveRequest Complete 2025-07-08T11:58:41.069731Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManageRequestRequest { User: "user" Command: REJECT RequestId: "user-r-3" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManageRequestResponse { Status { Code: OK } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::AllVDisksEvictionInRack [GOOD] Test command err: 2025-07-08T11:58:35.289037Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:35.289608Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2025-07-08T11:58:35.291085Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:35.291144Z node 1 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:35.291484Z 
node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:35.291646Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:35.293144Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2025-07-08T11:58:35.293172Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2025-07-08T11:58:35.293225Z node 1 :CMS DEBUG: Using default config. 2025-07-08T11:58:35.293309Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2025-07-08T11:58:35.294120Z node 1 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:35.294278Z node 1 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:35.294310Z node 1 :CMS DEBUG: Using default config 2025-07-08T11:58:35.294336Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:35.316053Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-07-08T11:58:35.332985Z node 1 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:35.333666Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:35.335108Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:35.335269Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-07-08T11:58:35.335279Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-07-08T11:58:35.335288Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-07-08T11:58:35.335292Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-07-08T11:58:35.335340Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-07-08T11:58:35.335364Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-07-08T11:58:35.337414Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId 
{ NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId 
{ NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2025-07-08T11:58:35.347749Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:35.381382Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:35.381449Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-07-08T11:58:35.415204Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:35.415250Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:35.415328Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:35.415655Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 4 InterconnectPort: 12004 Location { 
DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-5-5" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 1200 ... pdater] Request pdisks state: nodeId# 28, wbId# [28:8388350642965737326:1634689637] 2025-07-08T11:58:41.109296Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 29, wbId# [29:8388350642965737326:1634689637] 2025-07-08T11:58:41.109300Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 30, wbId# [30:8388350642965737326:1634689637] 2025-07-08T11:58:41.109304Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 31, wbId# [31:8388350642965737326:1634689637] 2025-07-08T11:58:41.109308Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 32, wbId# [32:8388350642965737326:1634689637] 2025-07-08T11:58:41.109379Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 25, response# PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180027 2025-07-08T11:58:41.109561Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 27, response# PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180027 2025-07-08T11:58:41.109589Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 28, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180027 2025-07-08T11:58:41.109601Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 26, response# PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180027 2025-07-08T11:58:41.109613Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 32, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180027 2025-07-08T11:58:41.109623Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 29, response# PDiskStateInfo { PDiskId: 29 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180027 2025-07-08T11:58:41.109634Z node 25 
:CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 30, response# PDiskStateInfo { PDiskId: 30 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180027 2025-07-08T11:58:41.109644Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 31, response# PDiskStateInfo { PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180027 2025-07-08T11:58:41.109654Z node 25 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-07-08T11:58:41.109695Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 26:26, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2025-07-08T11:58:41.109708Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 25:25, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2025-07-08T11:58:41.109714Z node 25 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 2 2025-07-08T11:58:41.109759Z node 25 :CMS DEBUG: TTxLogAndSend Execute 2025-07-08T11:58:41.109818Z node 25 :CMS DEBUG: TTxLogAndSend Execute 2025-07-08T11:58:41.109845Z node 25 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Success: true, cookie# 1 2025-07-08T11:58:41.109850Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 25:25 2025-07-08T11:58:41.109853Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 26:26 2025-07-08T11:58:41.124110Z node 25 :CMS DEBUG: TTxLogAndSend Complete 2025-07-08T11:58:41.124140Z node 25 :CMS DEBUG: TTxLogAndSend Complete 2025-07-08T11:58:41.138363Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:41.138410Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:41.138428Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:03:00Z 2025-07-08T11:58:41.138561Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 25 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-07-08T11:58:41.138572Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 25 has not yet been completed" } 2025-07-08T11:58:41.138582Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 25, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-07-08T11:58:41.138592Z node 25 :CMS DEBUG: Ring: 0; State: Ok 2025-07-08T11:58:41.138595Z node 25 :CMS DEBUG: Ring: 1; State: Ok 2025-07-08T11:58:41.138598Z node 25 :CMS DEBUG: Ring: 2; State: Ok 2025-07-08T11:58:41.138601Z node 25 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:41.138623Z node 25 :CMS DEBUG: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2025-07-08T11:58:41.138631Z node 25 :CMS INFO: Adding lock for Host ::1:12001 (25) (permission user-p-1 until 1970-01-01T00:13:00Z) 2025-07-08T11:58:41.138640Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:41.138675Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store 
permission: id# user-p-1, validity# 1970-01-01T00:13:00.127512Z, action# Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 2025-07-08T11:58:41.138692Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-07-08T11:58:41.149519Z node 25 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:41.149609Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 } Deadline: 780127512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 25 InterconnectPort: 12001 } } } } 2025-07-08T11:58:41.149619Z node 25 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:33:00.127512Z 2025-07-08T11:58:41.163179Z node 25 :CMS INFO: Adding lock for Host ::1:12001 (25) (permission user-p-1 until 1970-01-01T00:13:00Z) 2025-07-08T11:58:41.163282Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:41.163300Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:41.163315Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:03:00Z 2025-07-08T11:58:41.163452Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 26 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-07-08T11:58:41.163464Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 26 has not yet been completed" } 2025-07-08T11:58:41.163474Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-07-08T11:58:41.163480Z node 25 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:41.163502Z node 25 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user 2025-07-08T11:58:41.163509Z node 25 :CMS INFO: Adding lock for Host ::1:12002 (26) (permission user-p-2 until 1970-01-01T00:13:00Z) 2025-07-08T11:58:41.163518Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:41.163555Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:13:00.229024Z, action# Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 2025-07-08T11:58:41.163574Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-2, owner# user, order# 2, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-07-08T11:58:41.179609Z node 25 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:41.179700Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-2" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# 
NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-2" Permissions { Id: "user-p-2" Action { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 } Deadline: 780229024 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 26 InterconnectPort: 12002 } } } } 2025-07-08T11:58:41.179819Z node 25 :CMS INFO: User user is done with permissions user-p-1 2025-07-08T11:58:41.179827Z node 25 :CMS DEBUG: Resulting status: OK 2025-07-08T11:58:41.179841Z node 25 :CMS DEBUG: TTxRemovePermissions Execute 2025-07-08T11:58:41.179868Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reset host markers: host# 25 2025-07-08T11:58:41.179891Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-1, reason# permission user-p-1 was removed 2025-07-08T11:58:41.179896Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove 2025-07-08T11:58:41.201352Z node 25 :CMS DEBUG: TTxRemovePermissions Complete 2025-07-08T11:58:41.201432Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2025-07-08T11:58:41.201591Z node 25 :CMS INFO: User user is done with permissions user-p-2 2025-07-08T11:58:41.201603Z node 25 :CMS DEBUG: Resulting status: OK 2025-07-08T11:58:41.201617Z node 25 :CMS DEBUG: TTxRemovePermissions Execute 2025-07-08T11:58:41.201649Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reset host markers: host# 26 2025-07-08T11:58:41.201674Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-2, reason# permission user-p-2 was removed 2025-07-08T11:58:41.201679Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-2, reason# explicit remove 2025-07-08T11:58:41.217909Z node 25 :CMS DEBUG: TTxRemovePermissions Complete 2025-07-08T11:58:41.217985Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } >> KqpRm::DisonnectNodes >> TCmsTest::EmergencyDuringRollingRestart [GOOD] >> TExportToS3Tests::ShouldCheckQuotas [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::StateStorageLockedNodes [GOOD] Test command err: 2025-07-08T11:58:35.213687Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2025-07-08T11:58:35.214635Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:35.215848Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:35.215905Z node 1 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:35.216226Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2025-07-08T11:58:35.216974Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:35.217050Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2025-07-08T11:58:35.217093Z node 1 :CMS DEBUG: Using default config. 
2025-07-08T11:58:35.217181Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2025-07-08T11:58:35.217198Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:35.218637Z node 1 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:35.218664Z node 1 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:35.218688Z node 1 :CMS DEBUG: Using default config 2025-07-08T11:58:35.218711Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:35.237388Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-07-08T11:58:35.261338Z node 1 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:35.261487Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:35.262857Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:35.262971Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-07-08T11:58:35.262976Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-07-08T11:58:35.262984Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-07-08T11:58:35.262988Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-07-08T11:58:35.263004Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-07-08T11:58:35.263024Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-07-08T11:58:35.263047Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:35.264600Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { } } Success: true 2025-07-08T11:58:35.297258Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:35.297318Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-07-08T11:58:35.330401Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:35.330432Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:35.330495Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:05:00Z 2025-07-08T11:58:35.330599Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300027512 } Timestamp: 300027512 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300027512 } Timestamp: 300027512 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300027512 } Timestamp: 300027512 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300027512 } Timestamp: 300027512 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { 
Name: "storage" State: UP Version: "-1" Timestamp: 300027512 } Timestamp: 300027512 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Timestamp: 300027512 } } 2025-07-08T11:58:35.330652Z node 1 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "1" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-07-08T11:58:35.330659Z node 1 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "1" Services: "storage" Duration: 60000000 2025-07-08T11:58:35.330668Z node 1 :CMS DEBUG: [Nodes Counter] Checking Node: 1, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-07-08T11:58:35.330676Z node 1 :CMS DEBUG: Ring: 0; State: Ok 2025-07-08T11:58:35.330679Z node 1 :CMS DEBUG: Ring: 1; State: Ok 2025-07-08T11:58:35.330683Z node 1 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:35.330696Z node 1 :CMS DEBUG: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2025-07-08T11:58:35.330726Z node 1 :CMS INFO: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:06:00Z) 2025-07-08T11:58:35.330734Z node 1 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:35.330764Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:06:00.027512Z, action# Type: RESTART_SERVICES Host: "1" Services: "storage" Duration: 60000000 2025-07-08T11:58:35.384818Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:35.433812Z node 1 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:35.433905Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "1" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "1" Services: "storage" Duration: 60000000 } Deadline: 360027512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 1 InterconnectPort: 12001 } } } } 2025-07-08T11:58:35.433914Z node 1 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:08:00.027512Z 2025-07-08T11:58:35.457329Z node 1 :CMS INFO: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:06:00Z) 2025-07-08T11:58:35.457381Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:35.457399Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:35.457411Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:05:00Z 2025-07-08T11:58:35.457459Z node 1 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "2" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-07-08T11:58:35.457467Z node 1 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "2" Services: "storage" Duration: 60000000 2025-07-08T11:58:35.457480Z node 1 :CMS DEBUG: [Nodes Counter] Checking Node: 2, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-07-08T11:58:35.457491Z node 1 :CMS DEBUG: Ring: 0; State: Restart 2025-07-08T11:58:35.457495Z node 1 :CMS DEBUG: Ring: 1; State: Ok 
2025-07-08T11:58:35.457499Z node 1 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:35.457513Z node 1 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user 2025-07-08T11:58:35.457519Z node 1 :CMS INFO: Adding lock for Host ::1:12002 (2) (permission user-p-2 until 1970-01-01T00:06:00Z) 2025-07-08T11:58:35.457527Z node 1 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:35.457559Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:06:00.131024Z, action# Type: RESTART_SERVICES Host: "2" Services: "storage" Duration: 60000000 2025-07-08T11:58:35.469176Z node 1 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:35.469244Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "2" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-2" Permissions { Id: "user-p-2" Action { Type: RESTART_SERVICES Host: "2" Services: "storage" Duration: 60000000 } Deadline: 360131024 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 2 InterconnectPort: 12002 } } } } 2025-07-08T11:58:36.701460Z node 6 :CMS DEBUG: TConsole::TTxInitScheme Execute 2025-07-08T11:58:36.702911Z node 6 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:36.703933Z node 6 :CMS DEBUG: TConsole::TTxInitScheme Complete 2025-07-08T11:58:36.703974Z node 6 :CMS DEBUG: TConsole::TTxLoadState Execute 2025-07-08T11:58:36.704000Z node 6 :CMS DEBUG: Using default config. 
2025-07-08T11:58:36.704054Z node 6 :CMS DEBUG: TConsole::TTxLoadState Complete 2025-07-08T11:58:36.705049Z node 6 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:36.705111Z node 6 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:36.705434Z node 6 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:36.705489Z node 6 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:36.706863Z node 6 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:36.706891Z node 6 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:36.706921Z node 6 :CMS DEBUG: Using default config 2025-07-08T11:58:36.706937Z node 6 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:36.721401Z node 6 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-07-08T11:58:36.745199Z node 6 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:36.745295Z node 6 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:36.745310Z node 6 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:36.745384Z node 6 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-07-08T11:58:36.745388Z node 6 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-07-08T11:58:36.745393Z node 6 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-07-08T11:58:36.745396Z node 6 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-07-08T11:58:36.745403Z node 6 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:36.745450Z node 6 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-07-08T11:58:36.745459Z node 6 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-07-08T11:58:36.745506Z node 6 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { } } Success: true 2025-07-08T11:58:36.782368Z node 6 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:36.782405Z node 6 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } Se ... 
ge" Duration: 0 } Actions { Type: RESTART_SERVICES Host: "22" Services: "storage" Duration: 0 } Actions { Type: RESTART_SERVICES Host: "23" Services: "storage" Duration: 0 } PartialPermissionAllowed: true Schedule: false DryRun: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-6" Permissions { Id: "user-p-18" Action { Type: RESTART_SERVICES Host: "21" Services: "storage" Duration: 0 } Deadline: 120562264 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 21 InterconnectPort: 12016 } } } Permissions { Id: "user-p-19" Action { Type: RESTART_SERVICES Host: "22" Services: "storage" Duration: 0 } Deadline: 120562264 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 22 InterconnectPort: 12017 } } } Permissions { Id: "user-p-20" Action { Type: RESTART_SERVICES Host: "23" Services: "storage" Duration: 0 } Deadline: 120562264 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 23 InterconnectPort: 12018 } } } } 2025-07-08T11:58:37.291156Z node 6 :CMS INFO: User user is done with permissions user-p-18 2025-07-08T11:58:37.291167Z node 6 :CMS DEBUG: Resulting status: OK 2025-07-08T11:58:37.291178Z node 6 :CMS DEBUG: TTxRemovePermissions Execute 2025-07-08T11:58:37.291204Z node 6 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-18, reason# explicit remove 2025-07-08T11:58:37.306641Z node 6 :CMS DEBUG: TTxRemovePermissions Complete 2025-07-08T11:58:37.306728Z node 6 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-18" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2025-07-08T11:58:37.306926Z node 6 :CMS INFO: User user is done with permissions user-p-19 2025-07-08T11:58:37.306940Z node 6 :CMS DEBUG: Resulting status: OK 2025-07-08T11:58:37.306956Z node 6 :CMS DEBUG: TTxRemovePermissions Execute 2025-07-08T11:58:37.307004Z node 6 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-19, reason# explicit remove 2025-07-08T11:58:37.319904Z node 6 :CMS DEBUG: TTxRemovePermissions Complete 2025-07-08T11:58:37.319997Z node 6 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-19" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2025-07-08T11:58:37.320174Z node 6 :CMS INFO: User user is done with permissions user-p-20 2025-07-08T11:58:37.320186Z node 6 :CMS DEBUG: Resulting status: OK 2025-07-08T11:58:37.320200Z node 6 :CMS DEBUG: TTxRemovePermissions Execute 2025-07-08T11:58:37.320233Z node 6 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-20, reason# explicit remove 2025-07-08T11:58:37.333010Z node 6 :CMS DEBUG: TTxRemovePermissions Complete 2025-07-08T11:58:37.333095Z node 6 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-20" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2025-07-08T11:58:41.016071Z node 26 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:41.017339Z node 26 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:41.017408Z node 26 :CMS DEBUG: TTxInitScheme Execute 
2025-07-08T11:58:41.018049Z node 26 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:41.018106Z node 26 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:41.020394Z node 26 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:41.020440Z node 26 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:41.020484Z node 26 :CMS DEBUG: Using default config 2025-07-08T11:58:41.020504Z node 26 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:41.021001Z node 26 :CMS DEBUG: TConsole::TTxInitScheme Execute 2025-07-08T11:58:41.022991Z node 26 :CMS DEBUG: TConsole::TTxInitScheme Complete 2025-07-08T11:58:41.023024Z node 26 :CMS DEBUG: TConsole::TTxLoadState Execute 2025-07-08T11:58:41.023045Z node 26 :CMS DEBUG: Using default config. 2025-07-08T11:58:41.023097Z node 26 :CMS DEBUG: TConsole::TTxLoadState Complete 2025-07-08T11:58:41.036150Z node 26 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-07-08T11:58:41.046963Z node 26 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:41.047044Z node 26 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:41.047065Z node 26 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:41.047148Z node 26 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-07-08T11:58:41.047154Z node 26 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-07-08T11:58:41.047169Z node 26 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-07-08T11:58:41.047174Z node 26 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-07-08T11:58:41.047182Z node 26 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-07-08T11:58:41.047206Z node 26 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-07-08T11:58:41.047271Z node 26 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { } } Success: true 2025-07-08T11:58:41.057855Z node 26 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:41.100712Z node 26 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:41.100781Z node 26 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-07-08T11:58:41.101268Z node 26 :CMS INFO: OnTabletDead: 72057594037936128 2025-07-08T11:58:41.101281Z node 26 :CMS DEBUG: TCms::Cleanup 2025-07-08T11:58:41.105279Z node 26 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:41.115841Z node 26 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:41.115956Z node 26 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:41.116492Z node 26 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:41.116613Z node 26 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:41.116772Z node 26 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:41.116863Z node 26 :CMS DEBUG: Loaded config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-07-08T11:58:41.116890Z node 26 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:41.117003Z node 
26 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:41.117089Z node 26 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 } 2025-07-08T11:58:41.257288Z node 26 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:41.318736Z node 26 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:41.318819Z node 26 :CMS DEBUG: Timestamp: 1970-01-01T00:03:30Z 2025-07-08T11:58:41.318892Z node 26 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "31" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2025-07-08T11:58:41.318903Z node 26 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "31" Services: "storage" Duration: 60000000 2025-07-08T11:58:41.318919Z node 26 :CMS DEBUG: [Nodes Counter] Checking Node: 31, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 2 2025-07-08T11:58:41.318930Z node 26 :CMS DEBUG: Ring: 0; State: Ok 2025-07-08T11:58:41.318936Z node 26 :CMS DEBUG: Ring: 1; State: Restart 2025-07-08T11:58:41.318941Z node 26 :CMS DEBUG: Ring: 2; State: Restart 2025-07-08T11:58:41.318945Z node 26 :CMS DEBUG: Ring: 3; State: Locked 2025-07-08T11:58:41.318948Z node 26 :CMS DEBUG: Ring: 4; State: Locked 2025-07-08T11:58:41.318951Z node 26 :CMS DEBUG: Ring: 5; State: Ok 2025-07-08T11:58:41.318954Z node 26 :CMS DEBUG: Ring: 6; State: Ok 2025-07-08T11:58:41.318957Z node 26 :CMS DEBUG: Ring: 7; State: Ok 2025-07-08T11:58:41.318960Z node 26 :CMS DEBUG: Ring: 8; State: Ok 2025-07-08T11:58:41.318968Z node 26 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Too many unavailable state storage rings. Restarting rings: 2. Temporary (for a 2 minutes) locked rings: 2. Disabled rings: 0. Maximum allowed number of unavailable rings for this mode: 4) 2025-07-08T11:58:41.318998Z node 26 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "31" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Too many unavailable state storage rings. Restarting rings: 2. Temporary (for a 2 minutes) locked rings: 2. Disabled rings: 0. 
Maximum allowed number of unavailable rings for this mode: 4" } Deadline: 510135000 } 2025-07-08T11:58:41.333632Z node 26 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:41.346874Z node 26 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:41.346954Z node 26 :CMS DEBUG: Timestamp: 1970-01-01T00:03:30Z 2025-07-08T11:58:41.347013Z node 26 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "30" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2025-07-08T11:58:41.347024Z node 26 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "30" Services: "storage" Duration: 60000000 2025-07-08T11:58:41.347037Z node 26 :CMS DEBUG: [Nodes Counter] Checking Node: 30, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 2 2025-07-08T11:58:41.347047Z node 26 :CMS DEBUG: Ring: 0; State: Ok 2025-07-08T11:58:41.347054Z node 26 :CMS DEBUG: Ring: 1; State: Restart 2025-07-08T11:58:41.347058Z node 26 :CMS DEBUG: Ring: 2; State: Restart 2025-07-08T11:58:41.347061Z node 26 :CMS DEBUG: Ring: 3; State: Locked 2025-07-08T11:58:41.347065Z node 26 :CMS DEBUG: Ring: 4; State: Locked 2025-07-08T11:58:41.347068Z node 26 :CMS DEBUG: Ring: 5; State: Ok 2025-07-08T11:58:41.347071Z node 26 :CMS DEBUG: Ring: 6; State: Ok 2025-07-08T11:58:41.347074Z node 26 :CMS DEBUG: Ring: 7; State: Ok 2025-07-08T11:58:41.347077Z node 26 :CMS DEBUG: Ring: 8; State: Ok 2025-07-08T11:58:41.347081Z node 26 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:41.347137Z node 26 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "30" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "30" Services: "storage" Duration: 60000000 } Deadline: 270235000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 30 InterconnectPort: 12005 } } } } >> TColumnShardTestReadWrite::PortionInfoSize [GOOD] >> TMaintenanceApiTest::CompositeActionGroupSameStorageGroup [GOOD] >> TMaintenanceApiTest::ActionReason ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::EmergencyDuringRollingRestart [GOOD] Test command err: 2025-07-08T11:58:37.282992Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2025-07-08T11:58:37.284110Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:37.285954Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2025-07-08T11:58:37.286952Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2025-07-08T11:58:37.287015Z node 1 :CMS DEBUG: Using default config. 
2025-07-08T11:58:37.287107Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2025-07-08T11:58:37.287312Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:37.287383Z node 1 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:37.287761Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:37.287829Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:37.290009Z node 1 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:37.290048Z node 1 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:37.290082Z node 1 :CMS DEBUG: Using default config 2025-07-08T11:58:37.290110Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:37.312593Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-07-08T11:58:37.335482Z node 1 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:37.335631Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:37.337143Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:37.337289Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-07-08T11:58:37.337296Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-07-08T11:58:37.337306Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-07-08T11:58:37.337310Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-07-08T11:58:37.337343Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-07-08T11:58:37.337371Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-07-08T11:58:37.337403Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:37.339458Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 
1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 
1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2025-07-08T11:58:37.373710Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:37.373766Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-07-08T11:58:38.729084Z node 9 :CMS DEBUG: TConsole::TTxInitScheme Execute 2025-07-08T11:58:38.730060Z node 9 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:38.731269Z node 9 :CMS DEBUG: TConsole::TTxInitScheme Complete 2025-07-08T11:58:38.732066Z node 9 :CMS DEBUG: TConsole::TTxLoadState Execute 2025-07-08T11:58:38.732108Z node 9 :CMS DEBUG: Using default config. 2025-07-08T11:58:38.732168Z node 9 :CMS DEBUG: TConsole::TTxLoadState Complete 2025-07-08T11:58:38.732313Z node 9 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:38.732371Z node 9 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:38.732703Z node 9 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:38.732764Z node 9 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:38.733834Z node 9 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:38.733864Z node 9 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:38.733895Z node 9 :CMS DEBUG: Using default config 2025-07-08T11:58:38.733912Z node 9 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:38.755062Z node 9 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-07-08T11:58:38.781388Z node 9 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:38.781537Z node 9 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:38.781559Z node 9 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:38.781670Z node 9 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-07-08T11:58:38.781675Z node 9 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-07-08T11:58:38.781684Z node 9 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-07-08T11:58:38.781688Z node 9 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-07-08T11:58:38.781697Z node 9 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:38.781727Z node 9 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-07-08T11:58:38.781739Z node 9 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-07-08T11:58:38.781875Z node 9 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 9 PDiskId: 9 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 9 PDiskId: 9 VSlotId: 1000 } 
GroupGeneration: 1 } VSlot { VSlotId { NodeId: 9 PDiskId: 9 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 9 PDiskId: 9 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 9 PDiskId: 9 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } Group { GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 9 PDiskId: 9 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 9 PDiskId: 9 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 9 PDiskId: 9 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 9 PDiskId: 9 VSlotId: 1003 } } } } Success: true 2025-07-08T11:58:38.817311Z node 9 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:38.817374Z node 9 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { ... on: 60000000 2025-07-08T11:58:41.507344Z node 18 :CMS DEBUG: [Nodes Counter] Checking Node: 18, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-07-08T11:58:41.507381Z node 18 :CMS DEBUG: Ring: 0; State: Ok 2025-07-08T11:58:41.507384Z node 18 :CMS DEBUG: Ring: 1; State: Ok 2025-07-08T11:58:41.507387Z node 18 :CMS DEBUG: Ring: 2; State: Ok 2025-07-08T11:58:41.507390Z node 18 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:41.507400Z node 18 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 2025-07-08T11:58:41.507403Z node 18 :CMS DEBUG: [Nodes Counter] Checking Node: 19, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-07-08T11:58:41.507420Z node 18 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Issue in affected group with id '0': too many unavailable vdisks. Locked: Host ::1:12001 (18) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: ) 2025-07-08T11:58:41.507435Z node 18 :CMS DEBUG: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2025-07-08T11:58:41.507443Z node 18 :CMS INFO: Adding lock for Host ::1:12001 (18) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:41.507454Z node 18 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:41.507492Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:03:00.029000Z, action# Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 2025-07-08T11:58:41.507518Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# -80, body# User: "user" Actions { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (18) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. 
Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2025-07-08T11:58:41.551829Z node 18 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:41.606232Z node 18 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:41.606345Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW_PARTIAL } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } Deadline: 180029000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 18 InterconnectPort: 12001 } } } } 2025-07-08T11:58:41.606356Z node 18 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:05:00.029000Z 2025-07-08T11:58:41.609491Z node 18 :CMS INFO: User user is done with permissions user-p-1 2025-07-08T11:58:41.609520Z node 18 :CMS DEBUG: Resulting status: OK 2025-07-08T11:58:41.609538Z node 18 :CMS DEBUG: TTxRemovePermissions Execute 2025-07-08T11:58:41.609555Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove 2025-07-08T11:58:41.644194Z node 18 :CMS DEBUG: TTxRemovePermissions Complete 2025-07-08T11:58:41.644262Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2025-07-08T11:58:41.658650Z node 18 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:41.658692Z node 18 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:41.658710Z node 18 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:41.658886Z node 18 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -100 2025-07-08T11:58:41.658896Z node 18 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 2025-07-08T11:58:41.658905Z node 18 :CMS DEBUG: [Nodes Counter] Checking Node: 19, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-07-08T11:58:41.658941Z node 18 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:41.658957Z node 18 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user 2025-07-08T11:58:41.658964Z node 18 :CMS INFO: Adding lock for Host ::1:12002 (19) (permission user-p-2 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:41.658974Z node 18 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:41.659012Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:03:00.133512Z, action# Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 2025-07-08T11:58:41.673788Z node 18 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:41.673895Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# 
NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -100 }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-2" Permissions { Id: "user-p-2" Action { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 } Deadline: 180133512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 19 InterconnectPort: 12002 } } } } 2025-07-08T11:58:41.686801Z node 18 :CMS INFO: Adding lock for Host ::1:12002 (19) (permission user-p-2 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:41.686926Z node 18 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:41.686947Z node 18 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:41.686959Z node 18 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:41.687125Z node 18 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (18) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2025-07-08T11:58:41.687137Z node 18 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (18) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. 
Down: " } 2025-07-08T11:58:41.687147Z node 18 :CMS DEBUG: [Nodes Counter] Checking Node: 19, with state: Locked, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-07-08T11:58:41.687155Z node 18 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '19': node state: 'Locked') 2025-07-08T11:58:41.687178Z node 18 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:41.687224Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# -80, body# User: "user" Actions { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 Issue { Type: GENERIC Message: "Cannot lock node \'19\': node state: \'Locked\'" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2025-07-08T11:58:41.705261Z node 18 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:41.705334Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'19\': node state: \'Locked\'" } RequestId: "user-r-1" Deadline: 420235024 } 2025-07-08T11:58:41.705447Z node 18 :CMS INFO: User user is done with permissions user-p-2 2025-07-08T11:58:41.705455Z node 18 :CMS DEBUG: Resulting status: OK 2025-07-08T11:58:41.705463Z node 18 :CMS DEBUG: TTxRemovePermissions Execute 2025-07-08T11:58:41.705485Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-2, reason# explicit remove 2025-07-08T11:58:41.716451Z node 18 :CMS DEBUG: TTxRemovePermissions Complete 2025-07-08T11:58:41.716521Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2025-07-08T11:58:41.727963Z node 18 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:41.728004Z node 18 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:41.728019Z node 18 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:41.728133Z node 18 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 Issue { Type: GENERIC Message: "Cannot lock node \'19\': node state: \'Locked\'" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2025-07-08T11:58:41.728141Z node 18 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 Issue { Type: GENERIC Message: "Cannot lock node \'19\': node state: \'Locked\'" } 2025-07-08T11:58:41.728147Z node 18 :CMS DEBUG: [Nodes Counter] Checking Node: 19, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-07-08T11:58:41.728177Z node 18 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:41.728197Z node 18 :CMS DEBUG: Accepting permission: id# user-p-3, requestId# user-r-1, owner# user 2025-07-08T11:58:41.728202Z node 18 :CMS INFO: Adding lock for Host ::1:12002 (19) (permission user-p-3 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:41.728210Z node 18 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:41.728243Z node 18 :CMS NOTICE: [AuditLog] [CMS 
tablet] Store permission: id# user-p-3, validity# 1970-01-01T00:03:00.338048Z, action# Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 2025-07-08T11:58:41.728250Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-1, owner# user 2025-07-08T11:58:41.739253Z node 18 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:41.739353Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "user-p-3" Action { Type: RESTART_SERVICES Host: "19" Services: "storage" Duration: 60000000 } Deadline: 180338048 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 19 InterconnectPort: 12002 } } } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::PortionInfoSize [GOOD] Test command err: 184 112 28 48 32 24 16 24 56 >> TableCreator::CreateTables >> test.py::test[join-join_no_correlation_in_order_by--Results] [GOOD] >> test.py::test[join-join_no_correlation_in_order_by-off-ForceBlocks] >> TCmsTest::WalleCleanupTest [GOOD] >> TCmsTest::VDisksEvictionShouldFailWhileSentinelIsDisabled >> TCmsTest::StateRequest [GOOD] >> TCmsTest::StateRequestNode >> TCmsTest::TestKeepAvailableModeDisconnects [GOOD] >> TCmsTest::TestKeepAvailableModeScheduled >> TCmsTenatsTest::RequestShutdownHost [GOOD] >> TCmsTenatsTest::RequestShutdownHostWithTenantPolicy |64.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest >> TCmsTenatsTest::TestDefaultTenantPolicyWithSingleTenantHost [GOOD] >> TCmsTenatsTest::TestLimitsWithDownNode >> TCmsTenatsTest::TestTenantRatioLimit [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartMode >> test.py::test[expr-non_persistable_insert_into_fail--ForceBlocks] [GOOD] >> test.py::test[expr-non_persistable_insert_into_fail--Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldCheckQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:58:14.920647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:58:14.920674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:14.920681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:58:14.920686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:58:14.920700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:58:14.920704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2025-07-08T11:58:14.920714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:14.920740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:58:14.920814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:58:14.934564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:58:14.934589Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:58:14.938614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:58:14.938703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:58:14.938731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:58:14.940084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:58:14.940134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:58:14.940221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:14.940368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:58:14.941163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:14.941213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:58:14.941423Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:14.941433Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:14.941449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:58:14.941456Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:14.941462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:58:14.941488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:58:14.942722Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:58:14.964027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:58:14.964119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:14.964178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:58:14.964224Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:58:14.964235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:14.965022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:14.965052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:58:14.965109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:14.965119Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:58:14.965124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:58:14.965130Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:58:14.965550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:14.965561Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:58:14.965565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:58:14.965910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:14.965922Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:14.965928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:14.965935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:58:14.966602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:58:14.967079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:58:14.967121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:58:14.967302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:14.967328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at 
schemeshard: 72057594046678944 2025-07-08T11:58:14.967337Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:14.967411Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:58:14.967419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:14.967448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:58:14.967460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:58:14.967864Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:14.967872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:14.967911Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:14.967916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:58:14.967927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:14.967933Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:58:14.967943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:14.967947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:14.967952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:14.967955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:14.967959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:58:14.967964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:14.967969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:58:14.967973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:58:14.967983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:58:14.967989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:58:14.967993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:58:14.968405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:58:14.968421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 
72057594046678944, cooki ... 57594046678944 2025-07-08T11:58:43.164725Z node 5 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976720762:0, at schemeshard: 72057594046678944 2025-07-08T11:58:43.164731Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720762 ready parts: 1/1 2025-07-08T11:58:43.164752Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976720762 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:58:43.164837Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-07-08T11:58:43.164847Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-07-08T11:58:43.164850Z node 5 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976720762 2025-07-08T11:58:43.164854Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-07-08T11:58:43.164859Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T11:58:43.165006Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-07-08T11:58:43.165016Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-07-08T11:58:43.165019Z node 5 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976720762 2025-07-08T11:58:43.165023Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2025-07-08T11:58:43.165029Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-07-08T11:58:43.165040Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720762, ready parts: 0/1, is published: true 2025-07-08T11:58:43.165474Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-07-08T11:58:43.165614Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720762, at schemeshard: 72057594046678944 2025-07-08T11:58:43.165620Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720762, ready parts: 0/1, is published: true 2025-07-08T11:58:43.165626Z node 5 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976720762, at schemeshard: 72057594046678944 2025-07-08T11:58:43.165700Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720762:4294967295 from tablet: 
72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976720762 msg type: 269090816 2025-07-08T11:58:43.165720Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720762, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976720762 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976720762 at step: 5000007 2025-07-08T11:58:43.165781Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762 2025-07-08T11:58:43.165823Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:43.165841Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720762 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 21474838636 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:58:43.165846Z node 5 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976720762:0, step: 5000007, at schemeshard: 72057594046678944 2025-07-08T11:58:43.165866Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976720762:0, at schemeshard: 72057594046678944 2025-07-08T11:58:43.165874Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720762:0 progress is 1/1 2025-07-08T11:58:43.165878Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720762 ready parts: 1/1 2025-07-08T11:58:43.165883Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720762:0 progress is 1/1 2025-07-08T11:58:43.165886Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720762 ready parts: 1/1 2025-07-08T11:58:43.165892Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:58:43.165900Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T11:58:43.165905Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720762, ready parts: 1/1, is published: false 2025-07-08T11:58:43.165911Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720762 ready parts: 1/1 2025-07-08T11:58:43.165915Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720762:0 2025-07-08T11:58:43.165919Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720762:0 2025-07-08T11:58:43.165926Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-07-08T11:58:43.165933Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976720762, publications: 2, subscribers: 1 2025-07-08T11:58:43.165937Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720762, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-07-08T11:58:43.165941Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720762, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-07-08T11:58:43.166172Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at 
schemeshard: 72057594046678944, cookie: 281474976720762 2025-07-08T11:58:43.166428Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:43.166436Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976720762, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:43.166459Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976720762, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T11:58:43.166478Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:43.166483Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:336:2312], at schemeshard: 72057594046678944, txId: 281474976720762, path id: 1 2025-07-08T11:58:43.166488Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:336:2312], at schemeshard: 72057594046678944, txId: 281474976720762, path id: 3 FAKE_COORDINATOR: Erasing txId 281474976720762 2025-07-08T11:58:43.166613Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-07-08T11:58:43.166622Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-07-08T11:58:43.166627Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976720762 2025-07-08T11:58:43.166631Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-07-08T11:58:43.166634Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T11:58:43.166702Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-07-08T11:58:43.166710Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-07-08T11:58:43.166713Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976720762 2025-07-08T11:58:43.166717Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-07-08T11:58:43.166720Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T11:58:43.166727Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976720762, subscribers: 1 2025-07-08T11:58:43.166735Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard 
Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [5:289:2276] 2025-07-08T11:58:43.167097Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762 2025-07-08T11:58:43.167243Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762 2025-07-08T11:58:43.167257Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976720762 2025-07-08T11:58:43.167266Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976720762 2025-07-08T11:58:43.167272Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-07-08T11:58:43.167276Z node 5 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976720762 2025-07-08T11:58:43.167280Z node 5 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976720762, id# 102, itemIdx# 4294967295 2025-07-08T11:58:43.167518Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-07-08T11:58:43.167533Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-07-08T11:58:43.167538Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [5:714:2653] TestWaitNotification: OK eventTxId 102 >> TableCreator::CreateTables [GOOD] |64.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest |64.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest >> TxUsage::WriteToTopic_Demo_10 [GOOD] >> TCmsTenatsTest::TestClusterRatioLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled >> TCmsTest::SamePriorityRequest [GOOD] >> TCmsTest::SamePriorityRequest2 >> TCmsTest::WalleRequestDuringRollingRestart [GOOD] >> KqpRm::DisonnectNodes [GOOD] >> TKesusTest::TestSessionTimeoutAfterUnregister [GOOD] >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed >> TSequence::CreateSequenceParallel ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest >> TableCreator::CreateTables [GOOD] Test command err: 2025-07-08T11:58:44.269216Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679299997421096:2217];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:44.269232Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00125b/r3tmp/tmpNqQ1aZ/pdisk_1.dat 2025-07-08T11:58:44.345400Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:11816 TServer::EnableGrpc on GrpcPort 22019, node 1 2025-07-08T11:58:44.389309Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:58:44.389322Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:58:44.389324Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:58:44.389369Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-07-08T11:58:44.416633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:44.416661Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:44.417466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:58:44.421406Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:58:44.422289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:2, at schemeshard: 72057594046644480 2025-07-08T11:58:44.422983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:1, at schemeshard: 72057594046644480 >> TxUsage::Sinks_Oltp_WriteToTopic_3 >> TSequence::CreateSequence >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::WalleRequestDuringRollingRestart [GOOD] Test command err: 2025-07-08T11:58:36.279082Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2025-07-08T11:58:36.286517Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:36.288023Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2025-07-08T11:58:36.288068Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2025-07-08T11:58:36.288119Z node 1 :CMS DEBUG: Using default config. 
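The MODE_KEEP_AVAILABLE checks earlier in this log print identical ring states (rings 1-2 restarting, rings 3-4 locked, the rest Ok) yet return DISALLOW_TEMP for host "31" and ALLOW for host "30", with the reason quoting a limit of 4 unavailable rings. A plausible reading is that the state storage ring hosting the node to be restarted is added to the unavailable count only if it is still healthy. The C++ sketch below illustrates that reading; it is not the NKikimr::NCms implementation, and the ring membership of the two hosts is an assumption inferred from the two outcomes, not something the log states directly.

```cpp
#include <array>
#include <cstddef>
#include <iostream>

// Simplified stand-ins for the per-ring states printed as "Ring: N; State: ..."
// in the CMS logs above. The real types live in ydb/core/cms and are not
// reproduced here; this only illustrates the tally, not the actual code.
enum class ERingState { Ok, Locked, Restart, Disabled };

// Hypothetical check: a ring counts as unavailable if it is restarting, locked,
// or disabled, and the ring that hosts the node being restarted is added to the
// count when it is currently Ok. The request is allowed while the total stays
// within the per-mode limit quoted in the log ("Maximum allowed number of
// unavailable rings for this mode: 4"). The exact accounting in the real CMS
// may differ.
bool KeepAvailableAllows(const std::array<ERingState, 9>& rings,
                         std::size_t targetRing,
                         std::size_t maxUnavailable) {
    std::size_t unavailable = 0;
    for (ERingState s : rings) {
        if (s != ERingState::Ok) {
            ++unavailable;
        }
    }
    if (rings[targetRing] == ERingState::Ok) {
        ++unavailable;  // the restart would take one more ring down
    }
    return unavailable <= maxUnavailable;
}

int main() {
    // Ring states as printed for both requests: rings 1-2 restarting, rings 3-4 locked.
    const std::array<ERingState, 9> rings = {
        ERingState::Ok, ERingState::Restart, ERingState::Restart,
        ERingState::Locked, ERingState::Locked, ERingState::Ok,
        ERingState::Ok, ERingState::Ok, ERingState::Ok};

    // Assumed: the DISALLOW_TEMP host sits in a healthy ring (index 0),
    // the ALLOW host sits in an already-locked ring (index 3).
    std::cout << (KeepAvailableAllows(rings, 0, 4) ? "ALLOW" : "DISALLOW_TEMP") << "\n";
    std::cout << (KeepAvailableAllows(rings, 3, 4) ? "ALLOW" : "DISALLOW_TEMP") << "\n";
}
```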
2025-07-08T11:58:36.288219Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2025-07-08T11:58:36.295651Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:36.295770Z node 1 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:36.296209Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:36.296458Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:36.297881Z node 1 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:36.297916Z node 1 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:36.297947Z node 1 :CMS DEBUG: Using default config 2025-07-08T11:58:36.297977Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:36.318511Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-07-08T11:58:36.361304Z node 1 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:36.361426Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:36.362908Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:36.363053Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-07-08T11:58:36.363060Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-07-08T11:58:36.363070Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-07-08T11:58:36.363074Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-07-08T11:58:36.363092Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:36.363149Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-07-08T11:58:36.363195Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-07-08T11:58:36.373547Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/pdisk.data" Guid: 1 DriveStatus: 
ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 36 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 37 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 38 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 39 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 40 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 41 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 42 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 43 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 44 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 45 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 46 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 47 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 48 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 49 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 50 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 51 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 52 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 53 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 54 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 55 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 56 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 57 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 58 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 59 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 60 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 61 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 62 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 63 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 16 PDiskId: 64 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 16 PDiskId: 65 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } 
PDisk { NodeId: 16 PDiskId: 66 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 16 PDiskId: 67 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 17 PDiskId: 68 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 17 PDiskId: 69 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 17 PDiskId: 70 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 17 PDiskId: 71 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 18 PDiskId: 72 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 18 PDiskId: 73 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 18 PDiskId: 74 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 18 PDiskId: 75 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 19 PDiskId: 76 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 19 PDiskId: 77 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 19 PDiskId: 78 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 19 PDiskId: 79 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 20 PDiskId: 80 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 20 PDiskId: 81 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 20 PDiskId: 82 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 20 PDiskId: 83 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 21 PDiskId: 84 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 21 PDiskId: 85 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 21 PDiskId: 86 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 21 PDiskId: 87 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 22 PDiskId: 88 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 22 PDiskId: 89 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 22 PDiskId: 90 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 22 PDiskId: 91 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 23 PDiskId: 92 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 23 PDiskId: 93 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 23 PDiskId: 94 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 23 PDiskId: 95 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 24 PDiskId: 96 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 24 PDiskId: 97 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 24 PDiskId: 98 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 24 PDiskId: 99 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 
PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 P ... 1, validity# 1970-01-01T00:03:00.028000Z, action# Type: RESTART_SERVICES Host: "33" Services: "storage" Duration: 60000000 2025-07-08T11:58:43.285454Z node 33 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# -80, body# User: "user" Actions { Type: RESTART_SERVICES Host: "34" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (33) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2025-07-08T11:58:43.316727Z node 33 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:43.389271Z node 33 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:43.389379Z node 33 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "33" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "34" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW_PARTIAL } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "33" Services: "storage" Duration: 60000000 } Deadline: 180028000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 33 InterconnectPort: 12001 } } } } 2025-07-08T11:58:43.389392Z node 33 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:05:00.028000Z 2025-07-08T11:58:43.389589Z node 33 :CMS INFO: User user is done with permissions user-p-1 2025-07-08T11:58:43.389599Z node 33 :CMS DEBUG: Resulting status: OK 2025-07-08T11:58:43.389607Z node 33 :CMS DEBUG: TTxRemovePermissions Execute 2025-07-08T11:58:43.389620Z node 33 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove 2025-07-08T11:58:43.400547Z node 33 :CMS DEBUG: TTxRemovePermissions Complete 2025-07-08T11:58:43.400615Z node 33 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2025-07-08T11:58:43.406442Z node 33 :CMS INFO: Processing Wall-E request: TaskId: 
"task-1" Type: "automated" Issuer: "UT" Action: "reboot" Hosts: "34" DryRun: false 2025-07-08T11:58:43.429778Z node 33 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:43.429828Z node 33 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:43.429844Z node 33 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:43.441693Z node 33 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:43.441736Z node 33 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:43.441753Z node 33 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:43.441895Z node 33 :CMS INFO: Check request: User: "Wall-E" Actions { Type: REBOOT_HOST Host: "34" Duration: 18446744073709551615 } Schedule: true DryRun: false Priority: 20 2025-07-08T11:58:43.441905Z node 33 :CMS DEBUG: Checking action: Type: REBOOT_HOST Host: "34" Duration: 18446744073709551615 2025-07-08T11:58:43.441913Z node 33 :CMS DEBUG: [Nodes Counter] Checking Node: 34, with state: Locked, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-07-08T11:58:43.441922Z node 33 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '34': node state: 'Locked') 2025-07-08T11:58:43.441943Z node 33 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:43.441998Z node 33 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# Wall-E-r-2, owner# Wall-E, order# 2, priority# 20, body# User: "Wall-E" Actions { Type: REBOOT_HOST Host: "34" Duration: 18446744073709551615 Issue { Type: GENERIC Message: "Cannot lock node \'34\': node state: \'Locked\'" } } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: 20 2025-07-08T11:58:43.457329Z node 33 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:43.457424Z node 33 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "Wall-E" Actions { Type: REBOOT_HOST Host: "34" Duration: 18446744073709551615 } Schedule: true DryRun: false Priority: 20 }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'34\': node state: \'Locked\'" } RequestId: "Wall-E-r-2" Deadline: 420232024 } 2025-07-08T11:58:43.457486Z node 33 :CMS DEBUG: TTxStoreWalleTask Execute 2025-07-08T11:58:43.457516Z node 33 :CMS NOTICE: [AuditLog] [CMS tablet] Store wall-e task: id# task-1, requestId# Wall-E-r-2 2025-07-08T11:58:43.471577Z node 33 :CMS DEBUG: TTxStoreWalleTask Complete 2025-07-08T11:58:43.471624Z node 33 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvStoreWalleTask { Task: { TaskId: task-1 RequestId: Wall-E-r-2 Owner: Permissions: [] HasSingleCompositeActionGroup: 0 } }, response# NKikimr::NCms::TEvCms::TEvWalleTaskStored { TaskId: task-1 } 2025-07-08T11:58:43.471701Z node 33 :CMS NOTICE: [AuditLog] [Wall-E adapter] Reply: request# NKikimr::NCms::TEvCms::TEvWalleCreateTaskRequest { TaskId: "task-1" Type: "automated" Issuer: "UT" Action: "reboot" Hosts: "34" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvWalleCreateTaskResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'34\': node state: \'Locked\'" } TaskId: "task-1" Hosts: "34" } 2025-07-08T11:58:43.483779Z node 33 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:43.483819Z node 33 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:43.483834Z node 33 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:43.484001Z node 33 :CMS INFO: Check request: User: "user" 
Actions { Type: RESTART_SERVICES Host: "34" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (33) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2025-07-08T11:58:43.484014Z node 33 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "34" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (33) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } 2025-07-08T11:58:43.484026Z node 33 :CMS DEBUG: [Nodes Counter] Checking Node: 34, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-07-08T11:58:43.484061Z node 33 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:43.484081Z node 33 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-1, owner# user 2025-07-08T11:58:43.484089Z node 33 :CMS INFO: Adding lock for Host ::1:12002 (34) (permission user-p-2 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:43.484100Z node 33 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:43.484149Z node 33 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:03:00.335048Z, action# Type: RESTART_SERVICES Host: "34" Services: "storage" Duration: 60000000 2025-07-08T11:58:43.484159Z node 33 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-1, owner# user 2025-07-08T11:58:43.498086Z node 33 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:43.498148Z node 33 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "user-p-2" Action { Type: RESTART_SERVICES Host: "34" Services: "storage" Duration: 60000000 } Deadline: 180335048 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 34 InterconnectPort: 12002 } } } } 2025-07-08T11:58:43.498276Z node 33 :CMS INFO: User user is done with permissions user-p-2 2025-07-08T11:58:43.498285Z node 33 :CMS DEBUG: Resulting status: OK 2025-07-08T11:58:43.498297Z node 33 :CMS DEBUG: TTxRemovePermissions Execute 2025-07-08T11:58:43.498322Z node 33 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-2, reason# explicit remove 2025-07-08T11:58:43.514276Z node 33 :CMS DEBUG: TTxRemovePermissions Complete 2025-07-08T11:58:43.514360Z node 33 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2025-07-08T11:58:43.514534Z node 33 :CMS INFO: Processing Wall-E request: TaskId: "task-1" 2025-07-08T11:58:43.525982Z node 33 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:43.526019Z node 33 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:43.526036Z node 33 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:43.526173Z node 33 :CMS INFO: Check request: User: "Wall-E" Actions { Type: REBOOT_HOST Host: "34" Duration: 
18446744073709551615 Issue { Type: GENERIC Message: "Cannot lock node \'34\': node state: \'Locked\'" } } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: 20 2025-07-08T11:58:43.526182Z node 33 :CMS DEBUG: Checking action: Type: REBOOT_HOST Host: "34" Duration: 18446744073709551615 Issue { Type: GENERIC Message: "Cannot lock node \'34\': node state: \'Locked\'" } 2025-07-08T11:58:43.526191Z node 33 :CMS DEBUG: [Nodes Counter] Checking Node: 34, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-07-08T11:58:43.526222Z node 33 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:43.526239Z node 33 :CMS DEBUG: Accepting permission: id# Wall-E-p-3, requestId# Wall-E-r-2, owner# Wall-E 2025-07-08T11:58:43.526247Z node 33 :CMS INFO: Adding lock for Host ::1:12002 (34) (permission Wall-E-p-3 until 586524-01-19T08:01:49Z) 2025-07-08T11:58:43.526257Z node 33 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:43.526299Z node 33 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# Wall-E-p-3, validity# 586524-01-19T08:01:49.551615Z, action# Type: REBOOT_HOST Host: "34" Duration: 18446744073709551615 2025-07-08T11:58:43.526307Z node 33 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# Wall-E-r-2, owner# Wall-E 2025-07-08T11:58:43.541344Z node 33 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:43.541442Z node 33 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "Wall-E" RequestId: "Wall-E-r-2" }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "Wall-E-p-3" Action { Type: REBOOT_HOST Host: "34" Duration: 18446744073709551615 } Deadline: 18446744073709551615 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 34 InterconnectPort: 12002 } } } } 2025-07-08T11:58:43.541489Z node 33 :CMS NOTICE: [AuditLog] [Wall-E adapter] Reply: request# NKikimr::NCms::TEvCms::TEvWalleCheckTaskRequest { TaskId: "task-1" }, response# NKikimr::NCms::TEvCms::TEvWalleCheckTaskResponse { Status { Code: ALLOW } Task { TaskId: "task-1" Hosts: "34" } } ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::DisonnectNodes [GOOD] Test command err: 2025-07-08T11:58:43.464067Z node 2 :TX_PROXY DEBUG: actor# [2:180:2103] Bootstrap 2025-07-08T11:58:43.492840Z node 2 :TX_PROXY DEBUG: actor# [2:180:2103] Become StateWork (SchemeCache [2:187:2106]) 2025-07-08T11:58:43.493011Z node 1 :TX_PROXY DEBUG: actor# [1:179:2153] Bootstrap 2025-07-08T11:58:43.494213Z node 1 :TX_PROXY DEBUG: actor# [1:179:2153] Become StateWork (SchemeCache [1:190:2159]) 2025-07-08T11:58:43.506637Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:43.508175Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:43.508222Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-07-08T11:58:43.508505Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:43.508621Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-07-08T11:58:43.509105Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} 
StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-07-08T11:58:43.509115Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:498} Handle TEvInterconnect::TEvNodesInfo 2025-07-08T11:58:43.509141Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-07-08T11:58:43.510652Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-07-08T11:58:43.510667Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-07-08T11:58:43.510676Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-07-08T11:58:43.510687Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-07-08T11:58:43.510696Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-07-08T11:58:43.510706Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-07-08T11:58:43.542698Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-07-08T11:58:43.542736Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-07-08T11:58:43.554012Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-07-08T11:58:43.554057Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-07-08T11:58:43.554073Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-07-08T11:58:43.554084Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-07-08T11:58:43.554109Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-07-08T11:58:43.554117Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-07-08T11:58:43.554123Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-07-08T11:58:43.554134Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-07-08T11:58:43.566028Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-07-08T11:58:43.566087Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 
2025-07-08T11:58:43.578536Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-07-08T11:58:43.578606Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-07-08T11:58:43.578814Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-07-08T11:58:43.578823Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2174} LoadFinished 2025-07-08T11:58:43.580633Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-07-08T11:58:43.580658Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-07-08T11:58:43.581197Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-07-08T11:58:43.581302Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-07-08T11:58:43.581549Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/43nv/00139b/r3tmp/tmpDTTH4Y/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2025-07-08T11:58:43.581628Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/43nv/00139b/r3tmp/tmpDTTH4Y/pdisk_1.dat 2025-07-08T11:58:43.581636Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/43nv/00139b/r3tmp/tmpDTTH4Y/pdisk_1.dat 2025-07-08T11:58:43.581873Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-07-08T11:58:43.581901Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-07-08T11:58:43.581917Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-07-08T11:58:43.581961Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-07-08T11:58:43.582010Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-07-08T11:58:43.582453Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} 
Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-07-08T11:58:43.582506Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-07-08T11:58:43.594093Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-07-08T11:58:43.594135Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-07-08T11:58:43.596688Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-07-08T11:58:43.596849Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2747} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/43nv/00139b/r3tmp/tmpDTTH4Y/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-07-08T11:58:43.597275Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/43nv/00139b/r3tmp/tmpDTTH4Y/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/43nv/00139b/r3tmp/tmpDTTH4Y/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 18424178883013910119 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-07-08T11:58:43.597457Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2025-07-08T11:58:43.597532Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2025-07-08T11:58:43.597539Z node 2 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2025-07-08T11:58:43.597582Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2025-07-08T11:58:43.597608Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-07-08T11:58:43.597629Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2025-07-08T11:58:43.597634Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2025-07-08T11:58:43.597642Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) 
send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-07-08T11:58:43.597647Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2025-07-08T11:58:43.597741Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-07-08T11:58:43.597751Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2025-07-08T11:58:43.597756Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2025-07-08T11:58:43.597773Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:418:2307] 2025-07-08T11:58:43.597785Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2025-07-08T11:58:43.598559Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-07-08T11:58:43.598576Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2025-07-08T11:58:43.598581Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2025-07-08T11:58:43.598594Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:421:2116] 2025-07-08T11:58:43.598609Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2025-07-08T11:58:43.598615Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:408:2303] 2025-07-08T11:58:43.598650Z node 2 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2025-07-08T11:58:43.598655Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [2:409:2112] 2025-07-08T11:58:43.598830Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:418:2307]} 2025-07-08T11:58:43.598898Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-07-08T11:58:43.598906Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2025-07-08T11:58:43.598910Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2025-07-08T11:58:43.599037Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:421:2116]} 2025-07-08T11:58:43.599105Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-07-08T11:58:43.599112Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2025-07-08T11:58:43.599115Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2025-07-08T11:58:43.617654Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-07-08T11:58:43.617677Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-07-08T11:58:43.617682Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-07-08T11:58:43.617687Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2025-07-08T11:58:43.630758Z node 1 :BS_CONTROLLER DEBUG: {BSC19@console_interaction.cpp:74} Console proposed config response Response# {Status: ReverseCommit ConsoleConfigVersion: 0 YAML: "" } 2025-07-08T11:58:43.661491Z node 1 :TX_PROXY DEBUG: actor# [1:179:2153] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-07-08T11:58:43.662382Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2025-07-08T11:58:43.662406Z node 2 :TX_PROXY DEBUG: actor# [2:180:2103] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-07-08T11:58:43.662885Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-07-08T11:58:43.695483Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1 AvailableSize: 34225520640 TotalSize: 34359738368 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 EnforcedDynamicSlotSize: 34158411776 State: Normal } } 2025-07-08T11:58:43.726603Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1000 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: OpenFileError } } 2025-07-08T11:58:43.773221Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank: 0 AvailableSize: 34158411776 AllocatedSize: 0 StatusFlags: 1 VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Occupancy: 0.00098231827111984276 State: OK Replicated: true DiskSpace: Green } } 2025-07-08T11:58:44.101987Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-07-08T11:58:44.102546Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-07-08T11:58:44.102598Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-07-08T11:58:44.627655Z node 1 :PIPE_SERVER ERROR: [72057594046447617] NodeDisconnected NodeId# 2 2025-07-08T11:58:44.627730Z node 1 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 2025-07-08T11:58:44.627907Z node 1 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 2 2025-07-08T11:58:44.627995Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:75:2082] ServerId# [1:388:2290] TabletId# 72057594037932033 PipeClientId# [2:75:2082] 2025-07-08T11:58:44.628046Z node 2 :TX_PROXY WARN: actor# [2:180:2103] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2025-07-08T11:58:44.628065Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientDestroyed {TabletId=72057594046578946 ClientId=[2:421:2116]} 2025-07-08T11:58:44.628071Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar HandlePipeDestroyed - DISCONNECTED 2025-07-08T11:58:44.628077Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2025-07-08T11:58:44.628119Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to 
hive, pipe:[2:564:2116] 2025-07-08T11:58:44.629011Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-07-08T11:58:44.629073Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:564:2116]} 2025-07-08T11:58:44.629123Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1000 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: OpenFileError } } 2025-07-08T11:58:44.629175Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-07-08T11:58:44.629187Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2025-07-08T11:58:44.629191Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2025-07-08T11:58:44.645326Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] >> TCmsTest::RequestReplaceManyDevicesOnOneNode [GOOD] >> TBlobStorageProxyTest::TestGetAndRangeGetManyBlobs [GOOD] >> TBlobStorageProxyTest::TestEmptyRange >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD] >> TMaintenanceApiTest::ActionReason [GOOD] >> TMaintenanceApiTest::ForceAvailabilityMode >> TSequence::CreateSequence [GOOD] >> TSequence::CreateDropRecreate >> TSequence::CreateSequenceParallel [GOOD] >> TSequence::CreateSequenceSequential |64.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest |64.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest >> LocalPartition::Basic [GOOD] >> LocalPartition::Restarts >> TSequence::CreateDropRecreate [GOOD] >> TCmsTest::StateRequestNode [GOOD] >> TSequence::CreateSequenceInsideSequenceNotAllowed >> TCmsTest::ScheduledWalleRequestDuringRollingRestart >> TSequence::CreateSequenceSequential [GOOD] >> TSequence::CreateSequenceInsideTableThenDropSequence >> TCmsTest::TestKeepAvailableModeScheduled [GOOD] >> TCmsTest::TestKeepAvailableModeScheduledDisconnects >> test.py::test[window-win_func_aggr_4func_sort--Results] [GOOD] >> test.py::test[window-win_func_over_group_by--Results] >> TCmsTest::VDisksEvictionShouldFailWhileSentinelIsDisabled [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnUnsupportedAction >> TCmsTenatsTest::TestLimitsWithDownNode [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::RequestReplaceManyDevicesOnOneNode [GOOD] Test command err: 2025-07-08T11:58:36.215866Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:36.220157Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:36.220250Z node 1 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:36.220613Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:36.220717Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2025-07-08T11:58:36.221261Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:36.224025Z node 1 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:36.224163Z node 1 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:36.224218Z node 1 :CMS DEBUG: Using default config 
2025-07-08T11:58:36.224246Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:36.224309Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2025-07-08T11:58:36.224514Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2025-07-08T11:58:36.224553Z node 1 :CMS DEBUG: Using default config. 2025-07-08T11:58:36.224621Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2025-07-08T11:58:36.247408Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-07-08T11:58:36.261214Z node 1 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:36.261312Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:36.262758Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:36.262909Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-07-08T11:58:36.262917Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-07-08T11:58:36.262926Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-07-08T11:58:36.262930Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-07-08T11:58:36.262941Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-07-08T11:58:36.262964Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-07-08T11:58:36.264835Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 
GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 
PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2025-07-08T11:58:36.288479Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:36.321356Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:36.321419Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-07-08T11:58:36.354659Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:36.354690Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:36.354745Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:36.354993Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: 
"pdisk-5-5" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 1200 ... geTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 82 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 83 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120130 2025-07-08T11:58:42.900295Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 28, response# PDiskStateInfo { PDiskId: 84 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 85 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 86 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120130 2025-07-08T11:58:42.900316Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 39, response# PDiskStateInfo { PDiskId: 117 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 118 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 119 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120130 2025-07-08T11:58:42.900338Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 40, response# PDiskStateInfo { PDiskId: 120 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 121 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 122 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120130 2025-07-08T11:58:42.900358Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 31, response# PDiskStateInfo { PDiskId: 93 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 94 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 95 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120130 2025-07-08T11:58:42.900380Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 32, response# 
PDiskStateInfo { PDiskId: 96 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 97 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 98 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120130 2025-07-08T11:58:42.900402Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 33, response# PDiskStateInfo { PDiskId: 99 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 100 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 101 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120130 2025-07-08T11:58:42.900421Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 34, response# PDiskStateInfo { PDiskId: 102 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 103 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 104 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120130 2025-07-08T11:58:42.900442Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 35, response# PDiskStateInfo { PDiskId: 105 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 106 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 107 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120130 2025-07-08T11:58:42.900462Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 36, response# PDiskStateInfo { PDiskId: 108 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 109 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 110 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120130 2025-07-08T11:58:42.900482Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 37, response# PDiskStateInfo { PDiskId: 111 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 112 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 113 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 
214748364800 State: Normal } ResponseTime: 120130 2025-07-08T11:58:42.900504Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 38, response# PDiskStateInfo { PDiskId: 114 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 115 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 116 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120130 2025-07-08T11:58:42.900524Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 29, response# PDiskStateInfo { PDiskId: 87 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 88 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 89 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120130 2025-07-08T11:58:42.900545Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 30, response# PDiskStateInfo { PDiskId: 90 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 91 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 92 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120130 2025-07-08T11:58:42.900559Z node 25 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-07-08T11:58:42.911537Z node 25 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:42.911627Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: REPLACE_DEVICES Host: "25" Devices: "pdisk-25-75" Devices: "pdisk-25-76" Devices: "pdisk-25-77" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: REPLACE_DEVICES Host: "25" Devices: "pdisk-25-75" Devices: "pdisk-25-76" Devices: "pdisk-25-77" Duration: 60000000 } Deadline: 180130000 } } 2025-07-08T11:58:42.911663Z node 25 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:05:00.130000Z 2025-07-08T11:58:42.926311Z node 25 :CMS INFO: Adding lock for PDisk 25:76 (::1:/pdisk.data) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:42.926337Z node 25 :CMS INFO: Adding lock for PDisk 25:75 (::1:/pdisk.data) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:42.926344Z node 25 :CMS INFO: Adding lock for PDisk 25:77 (::1:/pdisk.data) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:42.926477Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:42.926495Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:42.926506Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 
2025-07-08T11:58:42.927206Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-07-08T11:58:42.927215Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 2025-07-08T11:58:42.927226Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 34, with state: Up, with limit: 3, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-07-08T11:58:42.927311Z node 25 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:42.927334Z node 25 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user 2025-07-08T11:58:42.927341Z node 25 :CMS INFO: Adding lock for Host ::1:12010 (34) (permission user-p-2 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:42.927351Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:42.927388Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:03:00.231512Z, action# Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 2025-07-08T11:58:42.942897Z node 25 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:42.943010Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-2" Permissions { Id: "user-p-2" Action { Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 } Deadline: 180231512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 34 InterconnectPort: 12010 } } } } |64.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD] Test command err: 2025-07-08T11:58:20.236165Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:20.236209Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:20.240164Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:20.240207Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:20.261797Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:20.573681Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:20.573721Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:20.578004Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:20.578054Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:20.605305Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:20.884826Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:20.884865Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:20.889452Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:20.889494Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:20.901223Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 
2025-07-08T11:58:20.901391Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:133:2159], cookie=13613123391142636220, session=0, seqNo=0) 2025-07-08T11:58:20.901434Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-07-08T11:58:20.922796Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:133:2159], cookie=13613123391142636220, session=1) 2025-07-08T11:58:20.922994Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:142:2166], cookie=4774923667435377373) 2025-07-08T11:58:20.923026Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:142:2166], cookie=4774923667435377373) 2025-07-08T11:58:21.355697Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:21.373190Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:21.769273Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:21.782652Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:22.153073Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:22.169429Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:22.552660Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:22.563557Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:22.985212Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:23.005251Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:23.393095Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:23.405264Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:23.775212Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:23.801212Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:24.193738Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:24.205889Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:24.581875Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:24.600615Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:25.017526Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:25.029794Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:25.399379Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:25.417404Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:25.829193Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:25.844780Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:26.261138Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:26.285250Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:26.678073Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:26.694837Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:27.149382Z node 3 :KESUS_TABLET 
DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:27.167604Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:27.543763Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:27.557461Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:27.940798Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:27.952473Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:28.321108Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:28.334251Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:28.737090Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:28.757207Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:29.176173Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:29.187183Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:29.547974Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:29.561423Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:29.949146Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:29.965369Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:30.373158Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:30.389543Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:30.774553Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:30.785633Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:31.181246Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:31.192326Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:31.588349Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:31.601304Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:31.967703Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:31.981429Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:32.393142Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:32.409340Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:32.805852Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:32.816988Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:33.213661Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:33.225542Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:33.616436Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:33.630291Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:34.009881Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:34.021381Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:34.417112Z node 3 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:34.429727Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:34.833134Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:34.844911Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:35.245426Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:35.256438Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:35.628006Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:35.645437Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:36.049136Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:36.069311Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:36.483943Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:36.505417Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:36.892606Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:36.909449Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:37.321475Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:37.337373Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:37.717164Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:37.729661Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:38.108722Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:38.121414Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:38.490435Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:38.504135Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:38.867483Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:38.879236Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:39.312809Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:39.329413Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:39.726983Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:39.746316Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:40.119199Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:40.134956Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:40.521107Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:40.541249Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:40.927646Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:40.938858Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:41.319833Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:41.333362Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:41.667656Z node 3 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:41.689703Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:42.051915Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:42.062795Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:42.414835Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:42.426904Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:42.767809Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:42.779606Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:43.149108Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:43.165199Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:43.533789Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:43.545127Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:43.893135Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:43.904204Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:44.329460Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:44.348207Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:44.729903Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:44.746886Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:45.168266Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-07-08T11:58:45.168319Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-07-08T11:58:45.180257Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-07-08T11:58:45.190668Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:473:2480], cookie=11824213867313229168) 2025-07-08T11:58:45.190716Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:473:2480], cookie=11824213867313229168) 2025-07-08T11:58:45.413313Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:45.413348Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:45.417839Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:45.417886Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:45.440695Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:45.442091Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:133:2159], cookie=11556263791072447506, path="Root", config={ MaxUnitsPerSecond: 100 }) 2025-07-08T11:58:45.442170Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-07-08T11:58:45.453193Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:133:2159], cookie=11556263791072447506) 2025-07-08T11:58:45.453718Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:142:2166]. Cookie: 0. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-07-08T11:58:45.453735Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:142:2166], cookie=0) 2025-07-08T11:58:45.453775Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:144:2168]. Cookie: 0. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-07-08T11:58:45.453781Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:144:2168], cookie=0) 2025-07-08T11:58:45.494761Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:144:2168]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2025-07-08T11:58:45.494799Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:142:2166]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2025-07-08T11:58:45.494898Z node 4 :KESUS_TABLET TRACE: Got TEvServerDisconnected([4:147:2171]) 2025-07-08T11:58:45.494934Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:144:2168]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 StateNotification { Status: SESSION_EXPIRED Issues { message: "Disconected." } } } } 2025-07-08T11:58:45.538061Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:142:2166]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 1 Amount: 10 StateNotification { Status: SUCCESS } } } >> TCmsTenatsTest::TestTenantRatioLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartModeScheduled >> TCmsTest::TestLogOperationsRollback [GOOD] |64.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest |64.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part0/pytest >> test.py::test[expr-non_persistable_insert_into_fail--Results] [GOOD] >> TSequence::CreateSequenceInsideTableThenDropSequence [GOOD] >> TSequence::CreateSequenceInsideTableThenDropTable >> TSequence::CreateSequenceInsideSequenceNotAllowed [GOOD] >> TSequence::CreateSequenceInsideIndexTableNotAllowed |64.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet_flat/ut/unittest >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky [GOOD] Test command err: ======= CUT ======= Part{[1:2:3:0:0:0:0] eph 0, 346b 12r} data 755b + FlatIndex{4} Label{3 rev 3, 172b} 5 rec | Page Row Bytes (Uint32, String) | 0 0 86b {1, aaa} | 1 3 88b {1, b} | 2 6 86b {2, NULL} | 3 9 86b {2, ccx} | 3 11 86b {2, cxz} + BTreeIndex{PageId: 5 RowCount: 12 DataSize: 346 ErasedRowCount: 0} Label{13 rev 1, 208b} | PageId: 0 RowCount: 3 DataSize: 86 ErasedRowCount: 0 | > {1, b} | PageId: 1 RowCount: 6 DataSize: 174 ErasedRowCount: 0 | > {2, NULL} | PageId: 2 RowCount: 9 DataSize: 260 ErasedRowCount: 0 | > {2, ccx} | PageId: 3 RowCount: 12 DataSize: 346 ErasedRowCount: 0 ======= FULL ======= Part{[1:2:3:0:0:0:0] eph 0, 346b 12r} data 777b + FlatIndex{4} Label{3 rev 3, 179b} 5 rec | Page Row Bytes (Uint32, String) | 0 0 86b {1, aaa} | 1 3 88b {1, baaaa} | 2 6 86b {2, aaa} | 3 9 86b {2, ccx} | 3 11 86b {2, cxz} + BTreeIndex{PageId: 5 RowCount: 12 DataSize: 346 ErasedRowCount: 0} Label{13 rev 1, 223b} | PageId: 0 RowCount: 3 DataSize: 86 ErasedRowCount: 0 | > {1, baaaa} | PageId: 1 RowCount: 6 DataSize: 174 ErasedRowCount: 0 | > {2, aaa} | PageId: 2 RowCount: 9 DataSize: 260 ErasedRowCount: 0 | > {2, ccx} | PageId: 3 RowCount: 12 DataSize: 346 ErasedRowCount: 0 ======= CUT ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1347b + FlatIndex{10} Label{3 rev 3, 362b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b {1, aaa} | 1 1 42b {1, ab} | 2 2 42b {1, ac} | 3 3 42b {1, b} | 4 4 42b {1, bb} | 5 5 42b {2, NULL} | 6 6 42b {2, ab} | 7 7 42b {2, ac} | 8 8 42b {2, b} | 9 9 42b {2, bb} | 9 9 42b {2, bba} + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 536b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > {1, ab} | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > {1, ac} | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > {1, b} | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > {1, bb} | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > {2, NULL} | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > {2, ab} | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > {2, ac} | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > {2, b} | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > {2, bb} | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 ======= FULL ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1381b + FlatIndex{10} Label{3 rev 3, 375b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b {1, aaa} | 1 1 42b {1, aba} | 2 2 42b {1, aca} | 3 3 42b {1, baa} | 4 
4 42b {1, bba} | 5 5 42b {2, aaa} | 6 6 42b {2, aba} | 7 7 42b {2, aca} | 8 8 42b {2, baa} | 9 9 42b {2, bba} | 9 9 42b {2, bba} + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 557b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > {1, aba} | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > {1, aca} | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > {1, baa} | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > {1, bba} | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > {2, aaa} | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > {2, aba} | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > {2, aca} | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > {2, baa} | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > {2, bba} | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 ======= SLICES ======= { [0, 1), [1, 3), [3, 5), [5, 6), [6, 7), [7, 8), [8, 9), [9, 9] } ======= CUT ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1347b + FlatIndex{10} Label{3 rev 3, 362b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b {1, aaa} | 1 1 42b {1, ab} | 2 2 42b {1, ac} | 3 3 42b {1, b} | 4 4 42b {1, bb} | 5 5 42b {2, NULL} | 6 6 42b {2, ab} | 7 7 42b {2, ac} | 8 8 42b {2, b} | 9 9 42b {2, bb} | 9 9 42b {2, bba} + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 536b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > {1, ab} | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > {1, ac} | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > {1, b} | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > {1, bb} | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > {2, NULL} | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > {2, ab} | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > {2, ac} | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > {2, b} | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > {2, bb} | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 ======= FULL ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1381b + FlatIndex{10} Label{3 rev 3, 375b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b {1, aaa} | 1 1 42b {1, aba} | 2 2 42b {1, aca} | 3 3 42b {1, baa} | 4 4 42b {1, bba} | 5 5 42b {2, aaa} | 6 6 42b {2, aba} | 7 7 42b {2, aca} | 8 8 42b {2, baa} | 9 9 42b {2, bba} | 9 9 42b {2, bba} + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 557b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > {1, aba} | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > {1, aca} | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > {1, baa} | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > {1, bba} | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > {2, aaa} | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > {2, aba} | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > {2, aca} | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > {2, baa} | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > {2, bba} | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 81b 2r} data 316b + FlatIndex{2} Label{3 rev 3, 107b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 41b {ccccccd} | 1 1 41b {ccccccd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 81 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccccd} | 
PageId: 1 RowCount: 2 DataSize: 81 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 83b 2r} data 320b + FlatIndex{2} Label{3 rev 3, 109b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 43b {ccccccd} | 1 1 43b {ccccccddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 83 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccccd} | PageId: 1 RowCount: 2 DataSize: 83 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 312b + FlatIndex{2} Label{3 rev 3, 105b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 40b {cccccd} | 1 1 40b {cccccd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccccd} | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 82b 2r} data 316b + FlatIndex{2} Label{3 rev 3, 107b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 42b {cccccd} | 1 1 42b {cccccddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 82 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccccd} | PageId: 1 RowCount: 2 DataSize: 82 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 79b 2r} data 308b + FlatIndex{2} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 39b {ccccd} | 1 1 39b {ccccd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 79 ErasedRowCount: 0} Label{13 rev 1, 107b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccd} | PageId: 1 RowCount: 2 DataSize: 79 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 81b 2r} data 312b + FlatIndex{2} Label{3 rev 3, 105b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 41b {ccccd} | 1 1 41b {ccccddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 81 ErasedRowCount: 0} Label{13 rev 1, 107b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccd} | PageId: 1 RowCount: 2 DataSize: 81 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 78b 2r} data 304b + FlatIndex{2} Label{3 rev 3, 101b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 38b {cccd} | 1 1 38b {cccd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 78 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccd} | PageId: 1 RowCount: 2 DataSize: 78 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 308b + FlatIndex{2} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 40b {cccd} | 1 1 40b {cccddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccd} | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 75b 2r} data 292b + FlatIndex{2} Label{3 rev 3, 95b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 35b {d} | 1 1 35b {d} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 75 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 75 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 77b 2r} data 296b + FlatIndex{2} Label{3 rev 3, 97b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 37b {d} | 1 1 37b {ddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 77 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 77 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 69b 2r} data 280b + FlatIndex{2} Label{3 rev 3, 89b} 3 rec | 
Page Row Bytes (String) | 0 0 34b {} | 1 1 35b {d} | 1 1 35b {d} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 69 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 34 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 69 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 71b 2r} data 284b + FlatIndex{2} Label{3 rev 3, 91b} 3 rec | Page Row Bytes (String) | 0 0 34b {} | 1 1 37b {d} | 1 1 37b {ddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 71 ErasedRowCount: 0} Label{13 r ... {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{48} Label{484 rev 1, 138b}, [36, +2)row | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{51} Label{514 rev 1, 138b}, [38, +2)row | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 4), [6, 8), [8, 12), [14, 16), [16, 18), [20, 28), [32, 34), [34, 38), [38, 39] } Part{[1:2:3:0:0:0:0] eph 0, 10774b 40r} data 15576b + FlatIndex{95} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 66b {0, 1} | 2 2 66b {0, 4} | 4 4 82b {0, 7} | 8 6 66b {0, 10} | 11 8 66b {1, 3} | 14 10 82b {1, 6} | 20 12 66b {1, 8} | 23 14 66b {2, NULL} | 26 16 82b {2, 4} | 36 18 66b {2, 7} | 39 20 66b {2, 10} | 42 22 82b {3, 3} | 48 24 66b {3, 6} | 53 26 66b {3, 8} | 58 28 82b {4, NULL} | 64 30 66b {4, 4} | 67 32 66b {4, 7} | 70 34 82b {4, 10} | 82 36 66b {5, 3} | 87 38 66b {5, 6} | 87 39 66b {5, 7} + BTreeIndex{PageId: 98 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 102b} | + BTreeIndex{PageId: 72 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 15 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 0 RowCount: 2 DataSize: 66 GroupDataSize: 76 ErasedRowCount: 0 | | | > {0, 4} | | | PageId: 2 RowCount: 4 DataSize: 132 GroupDataSize: 526 ErasedRowCount: 0 | | | > {0, 7} | | | PageId: 4 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0 | | > {0, 10} | | + BTreeIndex{PageId: 27 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 8 RowCount: 8 DataSize: 280 GroupDataSize: 1488 ErasedRowCount: 0 | | | > {1, 3} | | | PageId: 11 RowCount: 10 DataSize: 346 GroupDataSize: 1938 ErasedRowCount: 0 | | | > {1, 6} | | | PageId: 14 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0 | | > {1, 8} | | + BTreeIndex{PageId: 43 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 20 RowCount: 14 DataSize: 494 GroupDataSize: 2906 ErasedRowCount: 0 | | | > {2, NULL} | | | PageId: 23 RowCount: 16 DataSize: 560 GroupDataSize: 3360 ErasedRowCount: 0 | | | > {2, 4} | | | PageId: 26 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0 | > {2, 7} | + BTreeIndex{PageId: 97 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 59 RowCount: 24 DataSize: 856 GroupDataSize: 5315 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 36 RowCount: 20 DataSize: 708 GroupDataSize: 4330 ErasedRowCount: 0 | | | > {2, 10} | | | PageId: 39 RowCount: 22 DataSize: 774 GroupDataSize: 4784 ErasedRowCount: 0 | | | > {3, 3} | | | PageId: 42 RowCount: 24 DataSize: 856 
GroupDataSize: 5315 ErasedRowCount: 0 | | > {3, 6} | | + BTreeIndex{PageId: 71 RowCount: 30 DataSize: 1070 GroupDataSize: 6739 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 48 RowCount: 26 DataSize: 922 GroupDataSize: 5754 ErasedRowCount: 0 | | | > {3, 8} | | | PageId: 53 RowCount: 28 DataSize: 988 GroupDataSize: 6208 ErasedRowCount: 0 | | | > {4, NULL} | | | PageId: 58 RowCount: 30 DataSize: 1070 GroupDataSize: 6739 ErasedRowCount: 0 | | > {4, 4} | | + BTreeIndex{PageId: 96 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 249b} | | | PageId: 64 RowCount: 32 DataSize: 1136 GroupDataSize: 7178 ErasedRowCount: 0 | | | > {4, 7} | | | PageId: 67 RowCount: 34 DataSize: 1202 GroupDataSize: 7632 ErasedRowCount: 0 | | | > {4, 10} | | | PageId: 70 RowCount: 36 DataSize: 1284 GroupDataSize: 8163 ErasedRowCount: 0 | | | > {5, 3} | | | PageId: 82 RowCount: 38 DataSize: 1350 GroupDataSize: 8602 ErasedRowCount: 0 | | | > {5, 6} | | | PageId: 87 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 66b}, [0, +2)row | ERowOp 1: {0, 1} | ERowOp 1: {0, 3} + Rows{2} Label{24 rev 1, 66b}, [2, +2)row | ERowOp 1: {0, 4} | ERowOp 1: {0, 6} + Rows{4} Label{44 rev 1, 82b}, [4, +2)row | ERowOp 1: {0, 7} | ERowOp 1: {0, 8} + Rows{8} Label{84 rev 1, 66b}, [6, +2)row | ERowOp 1: {0, 10} | ERowOp 1: {1, 1} + Rows{11} Label{114 rev 1, 66b}, [8, +2)row | ERowOp 1: {1, 3} | ERowOp 1: {1, 4} + Rows{14} Label{144 rev 1, 82b}, [10, +2)row | ERowOp 1: {1, 6} | ERowOp 1: {1, 7} + Rows{20} Label{204 rev 1, 66b}, [12, +2)row | ERowOp 1: {1, 8} | ERowOp 1: {1, 10} + Rows{23} Label{234 rev 1, 66b}, [14, +2)row | ERowOp 1: {2, 1} | ERowOp 1: {2, 3} + Rows{26} Label{264 rev 1, 82b}, [16, +2)row | ERowOp 1: {2, 4} | ERowOp 1: {2, 6} + Rows{36} Label{364 rev 1, 66b}, [18, +2)row | ERowOp 1: {2, 7} | ERowOp 1: {2, 8} + Rows{39} Label{394 rev 1, 66b}, [20, +2)row | ERowOp 1: {2, 10} | ERowOp 1: {3, 1} + Rows{42} Label{424 rev 1, 82b}, [22, +2)row | ERowOp 1: {3, 3} | ERowOp 1: {3, 4} + Rows{48} Label{484 rev 1, 66b}, [24, +2)row | ERowOp 1: {3, 6} | ERowOp 1: {3, 7} + Rows{53} Label{534 rev 1, 66b}, [26, +2)row | ERowOp 1: {3, 8} | ERowOp 1: {3, 10} + Rows{58} Label{584 rev 1, 82b}, [28, +2)row | ERowOp 1: {4, 1} | ERowOp 1: {4, 3} + Rows{64} Label{644 rev 1, 66b}, [30, +2)row | ERowOp 1: {4, 4} | ERowOp 1: {4, 6} + Rows{67} Label{674 rev 1, 66b}, [32, +2)row | ERowOp 1: {4, 7} | ERowOp 1: {4, 8} + Rows{70} Label{704 rev 1, 82b}, [34, +2)row | ERowOp 1: {4, 10} | ERowOp 1: {5, 1} + Rows{82} Label{824 rev 1, 66b}, [36, +2)row | ERowOp 1: {5, 3} | ERowOp 1: {5, 4} + Rows{87} Label{874 rev 1, 66b}, [38, +2)row | ERowOp 1: {5, 6} | ERowOp 1: {5, 7} Slices{ [0, 4), [6, 8), [8, 12), [14, 16), [16, 18), [20, 28), [32, 34), [34, 38), [38, 39] } Part{[1:2:3:0:0:0:0] eph 0, 10774b 40r} data 15576b + FlatIndex{95} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 66b {0, 1} | 2 2 66b {0, 4} | 4 4 82b {0, 7} | 8 6 66b {0, 10} | 11 8 66b {1, 3} | 14 10 82b {1, 6} | 20 12 66b {1, 8} | 23 14 66b {2, NULL} | 26 16 82b {2, 4} | 36 18 66b {2, 7} | 39 20 66b {2, 10} | 42 22 82b {3, 3} | 48 24 66b {3, 6} | 53 26 66b {3, 8} | 58 28 82b {4, NULL} | 64 30 66b {4, 4} | 67 32 66b {4, 7} | 70 34 82b {4, 10} | 82 36 66b {5, 3} | 87 38 66b {5, 6} | 87 39 66b {5, 7} + BTreeIndex{PageId: 98 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 102b} | + BTreeIndex{PageId: 72 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} 
Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 15 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 0 RowCount: 2 DataSize: 66 GroupDataSize: 76 ErasedRowCount: 0 | | | > {0, 4} | | | PageId: 2 RowCount: 4 DataSize: 132 GroupDataSize: 526 ErasedRowCount: 0 | | | > {0, 7} | | | PageId: 4 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0 | | > {0, 10} | | + BTreeIndex{PageId: 27 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 8 RowCount: 8 DataSize: 280 GroupDataSize: 1488 ErasedRowCount: 0 | | | > {1, 3} | | | PageId: 11 RowCount: 10 DataSize: 346 GroupDataSize: 1938 ErasedRowCount: 0 | | | > {1, 6} | | | PageId: 14 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0 | | > {1, 8} | | + BTreeIndex{PageId: 43 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 20 RowCount: 14 DataSize: 494 GroupDataSize: 2906 ErasedRowCount: 0 | | | > {2, NULL} | | | PageId: 23 RowCount: 16 DataSize: 560 GroupDataSize: 3360 ErasedRowCount: 0 | | | > {2, 4} | | | PageId: 26 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0 | > {2, 7} | + BTreeIndex{PageId: 97 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 59 RowCount: 24 DataSize: 856 GroupDataSize: 5315 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 36 RowCount: 20 DataSize: 708 GroupDataSize: 4330 ErasedRowCount: 0 | | | > {2, 10} | | | PageId: 39 RowCount: 22 DataSize: 774 GroupDataSize: 4784 ErasedRowCount: 0 | | | > {3, 3} | | | PageId: 42 RowCount: 24 DataSize: 856 GroupDataSize: 5315 ErasedRowCount: 0 | | > {3, 6} | | + BTreeIndex{PageId: 71 RowCount: 30 DataSize: 1070 GroupDataSize: 6739 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 48 RowCount: 26 DataSize: 922 GroupDataSize: 5754 ErasedRowCount: 0 | | | > {3, 8} | | | PageId: 53 RowCount: 28 DataSize: 988 GroupDataSize: 6208 ErasedRowCount: 0 | | | > {4, NULL} | | | PageId: 58 RowCount: 30 DataSize: 1070 GroupDataSize: 6739 ErasedRowCount: 0 | | > {4, 4} | | + BTreeIndex{PageId: 96 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 249b} | | | PageId: 64 RowCount: 32 DataSize: 1136 GroupDataSize: 7178 ErasedRowCount: 0 | | | > {4, 7} | | | PageId: 67 RowCount: 34 DataSize: 1202 GroupDataSize: 7632 ErasedRowCount: 0 | | | > {4, 10} | | | PageId: 70 RowCount: 36 DataSize: 1284 GroupDataSize: 8163 ErasedRowCount: 0 | | | > {5, 3} | | | PageId: 82 RowCount: 38 DataSize: 1350 GroupDataSize: 8602 ErasedRowCount: 0 | | | > {5, 6} | | | PageId: 87 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 66b}, [0, +2)row | ERowOp 1: {0, 1} | ERowOp 1: {0, 3} + Rows{2} Label{24 rev 1, 66b}, [2, +2)row | ERowOp 1: {0, 4} | ERowOp 1: {0, 6} + Rows{4} Label{44 rev 1, 82b}, [4, +2)row | ERowOp 1: {0, 7} | ERowOp 1: {0, 8} + Rows{8} Label{84 rev 1, 66b}, [6, +2)row | ERowOp 1: {0, 10} | ERowOp 1: {1, 1} + Rows{11} Label{114 rev 1, 66b}, [8, +2)row | ERowOp 1: {1, 3} | ERowOp 1: {1, 4} + Rows{14} Label{144 rev 1, 82b}, [10, +2)row | ERowOp 1: {1, 6} | ERowOp 1: {1, 7} + Rows{20} Label{204 rev 1, 66b}, [12, +2)row | ERowOp 1: {1, 8} | ERowOp 1: {1, 10} + Rows{23} Label{234 rev 1, 66b}, [14, +2)row | ERowOp 1: {2, 1} | ERowOp 1: {2, 3} + Rows{26} Label{264 rev 1, 82b}, [16, +2)row | ERowOp 1: {2, 4} | ERowOp 1: {2, 6} + Rows{36} Label{364 rev 1, 66b}, [18, +2)row | ERowOp 1: 
{2, 7} | ERowOp 1: {2, 8} + Rows{39} Label{394 rev 1, 66b}, [20, +2)row | ERowOp 1: {2, 10} | ERowOp 1: {3, 1} + Rows{42} Label{424 rev 1, 82b}, [22, +2)row | ERowOp 1: {3, 3} | ERowOp 1: {3, 4} + Rows{48} Label{484 rev 1, 66b}, [24, +2)row | ERowOp 1: {3, 6} | ERowOp 1: {3, 7} + Rows{53} Label{534 rev 1, 66b}, [26, +2)row | ERowOp 1: {3, 8} | ERowOp 1: {3, 10} + Rows{58} Label{584 rev 1, 82b}, [28, +2)row | ERowOp 1: {4, 1} | ERowOp 1: {4, 3} + Rows{64} Label{644 rev 1, 66b}, [30, +2)row | ERowOp 1: {4, 4} | ERowOp 1: {4, 6} + Rows{67} Label{674 rev 1, 66b}, [32, +2)row | ERowOp 1: {4, 7} | ERowOp 1: {4, 8} + Rows{70} Label{704 rev 1, 82b}, [34, +2)row | ERowOp 1: {4, 10} | ERowOp 1: {5, 1} + Rows{82} Label{824 rev 1, 66b}, [36, +2)row | ERowOp 1: {5, 3} | ERowOp 1: {5, 4} + Rows{87} Label{874 rev 1, 66b}, [38, +2)row | ERowOp 1: {5, 6} | ERowOp 1: {5, 7} Slices{ [0, 4), [6, 8), [8, 12), [14, 16), [16, 18), [20, 28), [32, 34), [34, 38), [38, 39] } >> TBlobStorageProxyTest::TestEmptyRange [GOOD] |64.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |64.1%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut >> TCmsTest::SamePriorityRequest2 [GOOD] |64.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::TestLogOperationsRollback [GOOD] Test command err: 2025-07-08T11:58:39.915186Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2025-07-08T11:58:39.916031Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:39.918404Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:39.918474Z node 1 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:39.918808Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2025-07-08T11:58:39.918848Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:39.918969Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2025-07-08T11:58:39.919028Z node 1 :CMS DEBUG: Using default config. 
2025-07-08T11:58:39.919132Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2025-07-08T11:58:39.919152Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:39.920575Z node 1 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:39.920607Z node 1 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:39.920631Z node 1 :CMS DEBUG: Using default config 2025-07-08T11:58:39.920658Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:39.940247Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-07-08T11:58:39.967760Z node 1 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:39.967898Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:39.969155Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:39.969294Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-07-08T11:58:39.969301Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-07-08T11:58:39.969311Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-07-08T11:58:39.969315Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-07-08T11:58:39.969330Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:39.969376Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-07-08T11:58:39.969405Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-07-08T11:58:39.970683Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 
3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId 
{ NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2025-07-08T11:58:40.009836Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:40.009914Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-07-08T11:58:40.043841Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config 2025-07-08T11:58:40.043979Z node 1 :CMS NOTICE: Couldn't collect cluster state. 2025-07-08T11:58:40.044027Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: ERROR_TEMP Reason: "Cannot collect cluster state" } } 2025-07-08T11:58:40.044066Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: ERROR_TEMP Reason: "Cannot collect cluster state" } } 2025-07-08T11:58:40.097521Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:40.169379Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config 2025-07-08T11:58:40.169521Z node 1 :CMS NOTICE: Couldn't collect cluster state. 2025-07-08T11:58:40.169601Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "1" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ERROR_TEMP Reason: "Cannot collect cluster state" } } 2025-07-08T11:58:40.185974Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config 2025-07-08T11:58:40.186085Z node 1 :CMS NOTICE: Couldn't collect cluster state. 
2025-07-08T11:58:40.186143Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvNotification { User: "user" Actions { Type: REPLACE_DEVICES Host: "1" Devices: "pdisk-2-2" Duration: 60000000 } Time: 720126512 }, response# NKikimr::NCms::TEvCms::TEvNotificationResponse { Status { Code: ERROR_TEMP Reason: "Cannot collect cluster state" } } 2025-07-08T11:58:40.198358Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:40.198402Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:40.198485Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:40.198661Z node 1 :CMS INFO: Check request: User: "user" Actions { Type: REPLACE_DEVICES Host: "1" Devices: "pdisk-1-1" Duration: 60000000 } Actions { Type: REPLACE_DEVICES Host: "1" Devices: "pdisk-2-2" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-07-08T11:58:40.198671Z node 1 :CMS DEBUG: Checking action: Type: REPLACE_DEVICES Host: "1" Devices: "pdisk-1-1" Duration: 60000000 2025-07-08T11:58:40.198724Z node 1 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:40.198737Z node 1 :CMS DEBUG: Checking action: Type: REPLACE_DEVICES Host: "1" Devices: "pdisk-2-2" Duration: 60000000 2025-07-08T11:58:40.198756Z node 1 :CMS DEBUG: Result: DISALLOW (reason: The request is incorrect: too many disks from the one group. Fix the request or set PartialPermissionAllowed to true) 2025-07-08T11:58:40.198775Z node 1 :CMS DEBUG: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2025-07-08T11:58:40.198784Z node 1 :CMS INFO: Adding lock for PDisk 1:1 (::1:/pdisk.data) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:40.198794Z node 1 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:40.198847Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:03:00.326512Z, action# Type: REPLACE_DEVICES Host: "1" Devices: "pdis ... 
ion { Type: RESTART_SERVICES Host: "13" Services: "storage" Duration: 60000000 } Deadline: 180128000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 13 InterconnectPort: 12004 } } } Permissions { Action { Type: RESTART_SERVICES Host: "21" Services: "storage" Duration: 60000000 } Deadline: 180128000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 21 InterconnectPort: 12012 } } } Permissions { Action { Type: RESTART_SERVICES Host: "29" Services: "storage" Duration: 60000000 } Deadline: 180128000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 29 InterconnectPort: 12020 } } } } 2025-07-08T11:58:43.139257Z node 10 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "14" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "22" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "30" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2025-07-08T11:58:43.139262Z node 10 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "14" Services: "storage" Duration: 60000000 2025-07-08T11:58:43.139266Z node 10 :CMS DEBUG: [Nodes Counter] Checking Node: 14, with state: Up, with limit: 3, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-07-08T11:58:43.139288Z node 10 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:43.139294Z node 10 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "22" Services: "storage" Duration: 60000000 2025-07-08T11:58:43.139298Z node 10 :CMS DEBUG: [Nodes Counter] Checking Node: 22, with state: Up, with limit: 3, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-07-08T11:58:43.139319Z node 10 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:43.139326Z node 10 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "30" Services: "storage" Duration: 60000000 2025-07-08T11:58:43.139329Z node 10 :CMS DEBUG: [Nodes Counter] Checking Node: 30, with state: Up, with limit: 3, with ratio limit: 0, locked nodes: 2, down nodes: 0 2025-07-08T11:58:43.139351Z node 10 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:43.139386Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "14" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "22" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "30" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "14" Services: "storage" Duration: 60000000 } Deadline: 180128000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 14 InterconnectPort: 12005 } } } Permissions { Action { Type: RESTART_SERVICES Host: "22" Services: "storage" Duration: 60000000 } Deadline: 180128000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 22 InterconnectPort: 12013 } } } Permissions { Action { Type: RESTART_SERVICES Host: "30" Services: "storage" Duration: 60000000 } Deadline: 180128000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 30 InterconnectPort: 12021 } } } } 2025-07-08T11:58:43.139406Z node 10 :CMS INFO: Check request: User: "user" 
Actions { Type: RESTART_SERVICES Host: "15" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "23" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "31" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2025-07-08T11:58:43.139411Z node 10 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "15" Services: "storage" Duration: 60000000 2025-07-08T11:58:43.139414Z node 10 :CMS DEBUG: [Nodes Counter] Checking Node: 15, with state: Up, with limit: 3, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-07-08T11:58:43.139437Z node 10 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:43.139444Z node 10 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "23" Services: "storage" Duration: 60000000 2025-07-08T11:58:43.139447Z node 10 :CMS DEBUG: [Nodes Counter] Checking Node: 23, with state: Up, with limit: 3, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-07-08T11:58:43.139469Z node 10 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:43.139476Z node 10 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "31" Services: "storage" Duration: 60000000 2025-07-08T11:58:43.139479Z node 10 :CMS DEBUG: [Nodes Counter] Checking Node: 31, with state: Up, with limit: 3, with ratio limit: 0, locked nodes: 2, down nodes: 0 2025-07-08T11:58:43.139501Z node 10 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:43.139535Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "15" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "23" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "31" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "15" Services: "storage" Duration: 60000000 } Deadline: 180128000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 15 InterconnectPort: 12006 } } } Permissions { Action { Type: RESTART_SERVICES Host: "23" Services: "storage" Duration: 60000000 } Deadline: 180128000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 23 InterconnectPort: 12014 } } } Permissions { Action { Type: RESTART_SERVICES Host: "31" Services: "storage" Duration: 60000000 } Deadline: 180128000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 31 InterconnectPort: 12022 } } } } 2025-07-08T11:58:43.139556Z node 10 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "16" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "24" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "32" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2025-07-08T11:58:43.139561Z node 10 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "16" Services: "storage" Duration: 60000000 2025-07-08T11:58:43.139564Z node 10 :CMS DEBUG: [Nodes Counter] Checking Node: 16, with state: Up, with limit: 3, with ratio limit: 0, locked nodes: 0, down nodes: 0 
2025-07-08T11:58:43.139588Z node 10 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:43.139594Z node 10 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "24" Services: "storage" Duration: 60000000 2025-07-08T11:58:43.139598Z node 10 :CMS DEBUG: [Nodes Counter] Checking Node: 24, with state: Up, with limit: 3, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-07-08T11:58:43.139620Z node 10 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:43.139627Z node 10 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "32" Services: "storage" Duration: 60000000 2025-07-08T11:58:43.139630Z node 10 :CMS DEBUG: [Nodes Counter] Checking Node: 32, with state: Up, with limit: 3, with ratio limit: 0, locked nodes: 2, down nodes: 0 2025-07-08T11:58:43.139652Z node 10 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:43.139688Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "16" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "24" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "32" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "16" Services: "storage" Duration: 60000000 } Deadline: 180128000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 16 InterconnectPort: 12007 } } } Permissions { Action { Type: RESTART_SERVICES Host: "24" Services: "storage" Duration: 60000000 } Deadline: 180128000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 24 InterconnectPort: 12015 } } } Permissions { Action { Type: RESTART_SERVICES Host: "32" Services: "storage" Duration: 60000000 } Deadline: 180128000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 32 InterconnectPort: 12023 } } } } 2025-07-08T11:58:43.139708Z node 10 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "33" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2025-07-08T11:58:43.139713Z node 10 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 2025-07-08T11:58:43.139717Z node 10 :CMS DEBUG: [Nodes Counter] Checking Node: 17, with state: Up, with limit: 3, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-07-08T11:58:43.139740Z node 10 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:43.139747Z node 10 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 60000000 2025-07-08T11:58:43.139750Z node 10 :CMS DEBUG: [Nodes Counter] Checking Node: 25, with state: Up, with limit: 3, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-07-08T11:58:43.139773Z node 10 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:43.139779Z node 10 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "33" Services: "storage" Duration: 60000000 2025-07-08T11:58:43.139782Z node 10 :CMS DEBUG: [Nodes Counter] Checking Node: 33, with state: Up, with limit: 3, with ratio limit: 0, locked 
nodes: 2, down nodes: 0 2025-07-08T11:58:43.139804Z node 10 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:43.139839Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "33" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true Duration: 60000000 AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } Deadline: 180128000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 17 InterconnectPort: 12008 } } } Permissions { Action { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 60000000 } Deadline: 180128000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 25 InterconnectPort: 12016 } } } Permissions { Action { Type: RESTART_SERVICES Host: "33" Services: "storage" Duration: 60000000 } Deadline: 180128000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 33 InterconnectPort: 12024 } } } } >> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled [GOOD] >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled >> TSequence::CreateSequenceInsideIndexTableNotAllowed [GOOD] >> TSequence::CopyTableWithSequence >> TKesusTest::TestSessionTimeoutAfterDetach [GOOD] >> TKesusTest::TestSessionTimeoutAfterReboot >> TKesusTest::TestAcquireSemaphoreTimeout [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutTooBig >> TSequence::CreateSequenceInsideTableThenDropTable [GOOD] >> TSequence::CreateSequencesWithIndexedTable |64.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TMaintenanceApiTest::ForceAvailabilityMode [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutTooBig [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutInfinite |64.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestEmptyRange [GOOD] |64.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |64.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::SamePriorityRequest2 [GOOD] Test command err: 2025-07-08T11:58:41.049108Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2025-07-08T11:58:41.050005Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:41.051264Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2025-07-08T11:58:41.051291Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2025-07-08T11:58:41.051332Z node 1 :CMS DEBUG: Using default config. 
2025-07-08T11:58:41.051404Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2025-07-08T11:58:41.052648Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:41.052711Z node 1 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:41.053270Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:41.053375Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:41.054455Z node 1 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:41.054478Z node 1 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:41.054509Z node 1 :CMS DEBUG: Using default config 2025-07-08T11:58:41.054534Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:41.074130Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-07-08T11:58:41.118631Z node 1 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:41.118744Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:41.120281Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:41.120438Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-07-08T11:58:41.120445Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-07-08T11:58:41.120454Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-07-08T11:58:41.120459Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-07-08T11:58:41.120486Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-07-08T11:58:41.120509Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-07-08T11:58:41.120523Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:41.122588Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 
1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 
1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2025-07-08T11:58:41.177983Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:41.178044Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-07-08T11:58:41.212659Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:41.212702Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:41.212774Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:41.218422Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" 
Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-5-5" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 1200 ... Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2025-07-08T11:58:45.480138Z node 17 :CMS INFO: Adding lock for Host ::1:12001 (17) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:45.480149Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:45.480182Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:03:00.029000Z, action# Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 2025-07-08T11:58:45.480209Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# -80, body# User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. 
Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2025-07-08T11:58:45.522851Z node 17 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:45.564545Z node 17 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:45.564653Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW_PARTIAL } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "17" Services: "storage" Duration: 60000000 } Deadline: 180029000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 17 InterconnectPort: 12001 } } } } 2025-07-08T11:58:45.564666Z node 17 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:05:00.029000Z 2025-07-08T11:58:45.589836Z node 17 :CMS INFO: Adding lock for Host ::1:12001 (17) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:45.589914Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:45.589929Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:45.589939Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:45.590037Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2025-07-08T11:58:45.590045Z node 17 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 2025-07-08T11:58:45.590056Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 18, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-07-08T11:58:45.590084Z node 17 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Issue in affected group with id '0': too many unavailable vdisks. Locked: Host ::1:12001 (17) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: ) 2025-07-08T11:58:45.590103Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:45.590152Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-2, owner# user, order# 2, priority# -80, body# User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. 
Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2025-07-08T11:58:45.600899Z node 17 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:45.601007Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } PartialPermissionAllowed: true Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } RequestId: "user-r-2" Deadline: 420132512 } 2025-07-08T11:58:45.601137Z node 17 :CMS INFO: User user is done with permissions user-p-1 2025-07-08T11:58:45.601147Z node 17 :CMS DEBUG: Resulting status: OK 2025-07-08T11:58:45.601158Z node 17 :CMS DEBUG: TTxRemovePermissions Execute 2025-07-08T11:58:45.601178Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove 2025-07-08T11:58:45.611962Z node 17 :CMS DEBUG: TTxRemovePermissions Complete 2025-07-08T11:58:45.612041Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2025-07-08T11:58:45.624199Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:45.624231Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:45.624245Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:45.624394Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2025-07-08T11:58:45.624410Z node 17 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. 
Down: " } 2025-07-08T11:58:45.624418Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 18, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-07-08T11:58:45.624456Z node 17 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:45.624474Z node 17 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user 2025-07-08T11:58:45.624481Z node 17 :CMS INFO: Adding lock for Host ::1:12002 (18) (permission user-p-2 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:45.624490Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:45.624520Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:03:00.235536Z, action# Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 2025-07-08T11:58:45.624529Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-2, owner# user 2025-07-08T11:58:45.636470Z node 17 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:45.636571Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-2" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "user-p-2" Action { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } Deadline: 180235536 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 18 InterconnectPort: 12002 } } } } 2025-07-08T11:58:45.636747Z node 17 :CMS INFO: User user is done with permissions user-p-2 2025-07-08T11:58:45.636758Z node 17 :CMS DEBUG: Resulting status: OK 2025-07-08T11:58:45.636772Z node 17 :CMS DEBUG: TTxRemovePermissions Execute 2025-07-08T11:58:45.636800Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-2, reason# explicit remove 2025-07-08T11:58:45.647774Z node 17 :CMS DEBUG: TTxRemovePermissions Complete 2025-07-08T11:58:45.647853Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2025-07-08T11:58:45.659698Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:45.659742Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:45.659760Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:45.659908Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2025-07-08T11:58:45.659921Z node 17 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. 
Down: " } 2025-07-08T11:58:45.659935Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 18, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-07-08T11:58:45.659975Z node 17 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:45.660000Z node 17 :CMS DEBUG: Accepting permission: id# user-p-3, requestId# user-r-1, owner# user 2025-07-08T11:58:45.660008Z node 17 :CMS INFO: Adding lock for Host ::1:12002 (18) (permission user-p-3 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:45.660019Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:45.660062Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-3, validity# 1970-01-01T00:03:00.338560Z, action# Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 2025-07-08T11:58:45.660072Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-1, owner# user 2025-07-08T11:58:45.673675Z node 17 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:45.673777Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "user-p-3" Action { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } Deadline: 180338560 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 18 InterconnectPort: 12002 } } } } >> TSequence::CreateSequencesWithIndexedTable [GOOD] >> TSequence::CreateTableWithDefaultFromSequence >> TSequence::CopyTableWithSequence [GOOD] >> TSequence::AlterSequence >> TCmsTest::TestKeepAvailableModeScheduledDisconnects [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutInfinite [GOOD] >> TKesusTest::TestAcquireSemaphoreRebootTimeout |64.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |64.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |64.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest |64.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap [GOOD] >> test.py::test[join-join_no_correlation_in_order_by-off-ForceBlocks] [GOOD] >> test.py::test[join-join_no_correlation_in_order_by-off-Results] >> TCmsTenatsTest::RequestShutdownHostWithTenantPolicy [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartMode |64.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::ForceAvailabilityMode [GOOD] Test command err: 2025-07-08T11:58:41.977093Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2025-07-08T11:58:41.978165Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:41.979400Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2025-07-08T11:58:41.979451Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2025-07-08T11:58:41.979498Z node 1 :CMS DEBUG: Using default config. 
2025-07-08T11:58:41.979579Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2025-07-08T11:58:41.980862Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:41.980924Z node 1 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:41.981371Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:41.981406Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:41.982961Z node 1 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:41.982997Z node 1 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:41.983032Z node 1 :CMS DEBUG: Using default config 2025-07-08T11:58:41.983061Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:42.002998Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-07-08T11:58:42.038309Z node 1 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:42.038431Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:42.039881Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:42.040021Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-07-08T11:58:42.040028Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-07-08T11:58:42.040036Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-07-08T11:58:42.040039Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-07-08T11:58:42.040052Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:42.040128Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-07-08T11:58:42.040152Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-07-08T11:58:42.042467Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 
1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 
1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2025-07-08T11:58:42.094751Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:42.094800Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-07-08T11:58:42.122218Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:42.122245Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:42.122314Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:42.122589Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" 
Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-5-5" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 1200 ... 21, with state: Up, with limit: 1, with ratio limit: 30, locked nodes: 4, down nodes: 0 2025-07-08T11:58:46.022663Z node 17 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:46.022670Z node 17 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "22" Duration: 600000000 2025-07-08T11:58:46.022673Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 22, with state: Up, with limit: 1, with ratio limit: 30, locked nodes: 5, down nodes: 0 2025-07-08T11:58:46.022709Z node 17 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:46.022716Z node 17 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "23" Duration: 600000000 2025-07-08T11:58:46.022719Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 23, with state: Up, with limit: 1, with ratio limit: 30, locked nodes: 6, down nodes: 0 2025-07-08T11:58:46.022758Z node 17 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:46.022764Z node 17 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "24" Duration: 600000000 2025-07-08T11:58:46.022770Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 24, with state: Up, with limit: 1, with ratio limit: 30, locked nodes: 7, down nodes: 0 2025-07-08T11:58:46.022817Z node 17 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:46.022832Z node 17 :CMS DEBUG: Accepting permission: id# test-user-p-1, requestId# test-user-r-1, owner# test-user 2025-07-08T11:58:46.022840Z node 17 :CMS INFO: Adding lock for Host ::1:12001 (17) (permission test-user-p-1 until 1970-01-01T00:12:00Z) 2025-07-08T11:58:46.022845Z node 17 :CMS DEBUG: Accepting permission: id# test-user-p-2, requestId# test-user-r-1, owner# test-user 2025-07-08T11:58:46.022849Z node 17 :CMS INFO: Adding lock for Host ::1:12002 (18) (permission test-user-p-2 until 1970-01-01T00:12:00Z) 2025-07-08T11:58:46.022853Z node 17 :CMS DEBUG: Accepting permission: id# test-user-p-3, requestId# test-user-r-1, owner# test-user 2025-07-08T11:58:46.022857Z node 17 :CMS INFO: Adding lock for Host ::1:12003 (19) (permission test-user-p-3 until 1970-01-01T00:12:00Z) 2025-07-08T11:58:46.022861Z node 17 :CMS DEBUG: Accepting permission: id# test-user-p-4, requestId# test-user-r-1, owner# test-user 2025-07-08T11:58:46.022865Z node 17 :CMS INFO: Adding lock for Host ::1:12004 (20) (permission test-user-p-4 until 1970-01-01T00:12:00Z) 2025-07-08T11:58:46.022869Z node 17 :CMS DEBUG: Accepting permission: id# test-user-p-5, requestId# test-user-r-1, owner# test-user 2025-07-08T11:58:46.022873Z node 17 :CMS INFO: Adding lock for Host ::1:12005 (21) (permission test-user-p-5 until 1970-01-01T00:12:00Z) 2025-07-08T11:58:46.022878Z node 17 :CMS DEBUG: Accepting permission: id# test-user-p-6, 
requestId# test-user-r-1, owner# test-user 2025-07-08T11:58:46.022881Z node 17 :CMS INFO: Adding lock for Host ::1:12006 (22) (permission test-user-p-6 until 1970-01-01T00:12:00Z) 2025-07-08T11:58:46.022885Z node 17 :CMS DEBUG: Accepting permission: id# test-user-p-7, requestId# test-user-r-1, owner# test-user 2025-07-08T11:58:46.022906Z node 17 :CMS INFO: Adding lock for Host ::1:12007 (23) (permission test-user-p-7 until 1970-01-01T00:12:00Z) 2025-07-08T11:58:46.022910Z node 17 :CMS DEBUG: Accepting permission: id# test-user-p-8, requestId# test-user-r-1, owner# test-user 2025-07-08T11:58:46.022914Z node 17 :CMS INFO: Adding lock for Host ::1:12008 (24) (permission test-user-p-8 until 1970-01-01T00:12:00Z) 2025-07-08T11:58:46.022927Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:46.022977Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# test-user-p-1, validity# 1970-01-01T00:12:00.128000Z, action# Type: SHUTDOWN_HOST Host: "17" Duration: 600000000 2025-07-08T11:58:46.022986Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# test-user-p-2, validity# 1970-01-01T00:12:00.128000Z, action# Type: SHUTDOWN_HOST Host: "18" Duration: 600000000 2025-07-08T11:58:46.022995Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# test-user-p-3, validity# 1970-01-01T00:12:00.128000Z, action# Type: SHUTDOWN_HOST Host: "19" Duration: 600000000 2025-07-08T11:58:46.023001Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# test-user-p-4, validity# 1970-01-01T00:12:00.128000Z, action# Type: SHUTDOWN_HOST Host: "20" Duration: 600000000 2025-07-08T11:58:46.023008Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# test-user-p-5, validity# 1970-01-01T00:12:00.128000Z, action# Type: SHUTDOWN_HOST Host: "21" Duration: 600000000 2025-07-08T11:58:46.023015Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# test-user-p-6, validity# 1970-01-01T00:12:00.128000Z, action# Type: SHUTDOWN_HOST Host: "22" Duration: 600000000 2025-07-08T11:58:46.023021Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# test-user-p-7, validity# 1970-01-01T00:12:00.128000Z, action# Type: SHUTDOWN_HOST Host: "23" Duration: 600000000 2025-07-08T11:58:46.023028Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# test-user-p-8, validity# 1970-01-01T00:12:00.128000Z, action# Type: SHUTDOWN_HOST Host: "24" Duration: 600000000 2025-07-08T11:58:46.023037Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# test-user-r-1, owner# test-user 2025-07-08T11:58:46.023094Z node 17 :CMS DEBUG: [Sentinel] [Main] Config was updated in 0.100000s 2025-07-08T11:58:46.023103Z node 17 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-07-08T11:58:46.023122Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 17, wbId# [17:8388350642965737326:1634689637] 2025-07-08T11:58:46.023128Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 18, wbId# [18:8388350642965737326:1634689637] 2025-07-08T11:58:46.023132Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 19, wbId# [19:8388350642965737326:1634689637] 2025-07-08T11:58:46.023136Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 20, wbId# [20:8388350642965737326:1634689637] 2025-07-08T11:58:46.023140Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 21, wbId# [21:8388350642965737326:1634689637] 2025-07-08T11:58:46.023144Z node 17 
:CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 22, wbId# [22:8388350642965737326:1634689637] 2025-07-08T11:58:46.023148Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 23, wbId# [23:8388350642965737326:1634689637] 2025-07-08T11:58:46.023151Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 24, wbId# [24:8388350642965737326:1634689637] 2025-07-08T11:58:46.024207Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 17, response# PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120128 2025-07-08T11:58:46.024384Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 23, response# PDiskStateInfo { PDiskId: 23 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120128 2025-07-08T11:58:46.024408Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 24, response# PDiskStateInfo { PDiskId: 24 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120128 2025-07-08T11:58:46.024435Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 18, response# PDiskStateInfo { PDiskId: 18 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120128 2025-07-08T11:58:46.024446Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 19, response# PDiskStateInfo { PDiskId: 19 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120128 2025-07-08T11:58:46.024457Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 20, response# PDiskStateInfo { PDiskId: 20 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120128 2025-07-08T11:58:46.024468Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 21, response# PDiskStateInfo { PDiskId: 21 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120128 2025-07-08T11:58:46.024479Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 22, response# PDiskStateInfo { PDiskId: 22 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120128 2025-07-08T11:58:46.024495Z node 17 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-07-08T11:58:46.035524Z node 17 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:46.035700Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "test-user" Actions { Type: SHUTDOWN_HOST Host: "17" Duration: 600000000 } Actions { Type: SHUTDOWN_HOST Host: "18" Duration: 600000000 } Actions { Type: SHUTDOWN_HOST Host: "19" Duration: 600000000 } Actions { Type: SHUTDOWN_HOST Host: "20" Duration: 600000000 } Actions { Type: SHUTDOWN_HOST Host: "21" 
Duration: 600000000 } Actions { Type: SHUTDOWN_HOST Host: "22" Duration: 600000000 } Actions { Type: SHUTDOWN_HOST Host: "23" Duration: 600000000 } Actions { Type: SHUTDOWN_HOST Host: "24" Duration: 600000000 } PartialPermissionAllowed: true Schedule: true DryRun: false Reason: "" AvailabilityMode: MODE_FORCE_RESTART MaintenanceTaskId: "task-1" }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "test-user-r-1" Permissions { Id: "test-user-p-1" Action { Type: SHUTDOWN_HOST Host: "17" Duration: 600000000 } Deadline: 720128000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 17 InterconnectPort: 12001 } } } Permissions { Id: "test-user-p-2" Action { Type: SHUTDOWN_HOST Host: "18" Duration: 600000000 } Deadline: 720128000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 18 InterconnectPort: 12002 } } } Permissions { Id: "test-user-p-3" Action { Type: SHUTDOWN_HOST Host: "19" Duration: 600000000 } Deadline: 720128000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 19 InterconnectPort: 12003 } } } Permissions { Id: "test-user-p-4" Action { Type: SHUTDOWN_HOST Host: "20" Duration: 600000000 } Deadline: 720128000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 20 InterconnectPort: 12004 } } } Permissions { Id: "test-user-p-5" Action { Type: SHUTDOWN_HOST Host: "21" Duration: 600000000 } Deadline: 720128000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 21 InterconnectPort: 12005 } } } Permissions { Id: "test-user-p-6" Action { Type: SHUTDOWN_HOST Host: "22" Duration: 600000000 } Deadline: 720128000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 22 InterconnectPort: 12006 } } } Permissions { Id: "test-user-p-7" Action { Type: SHUTDOWN_HOST Host: "23" Duration: 600000000 } Deadline: 720128000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 23 InterconnectPort: 12007 } } } Permissions { Id: "test-user-p-8" Action { Type: SHUTDOWN_HOST Host: "24" Duration: 600000000 } Deadline: 720128000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 24 InterconnectPort: 12008 } } } } 2025-07-08T11:58:46.035712Z node 17 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:32:00.128000Z |64.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TCmsTest::ScheduledWalleRequestDuringRollingRestart [GOOD] >> TSequence::CreateTableWithDefaultFromSequence [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceAndIndex >> test.py::test[join-join_no_correlation_in_order_by-off-Results] [SKIPPED] |64.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::TestKeepAvailableModeScheduledDisconnects [GOOD] Test command err: 2025-07-08T11:58:42.506511Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:42.510574Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:42.510649Z node 1 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:42.511090Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:42.511166Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2025-07-08T11:58:42.548018Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: 
NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:42.553319Z node 1 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:42.553468Z node 1 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:42.553521Z node 1 :CMS DEBUG: Using default config 2025-07-08T11:58:42.553547Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:42.553605Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2025-07-08T11:58:42.553840Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2025-07-08T11:58:42.553880Z node 1 :CMS DEBUG: Using default config. 2025-07-08T11:58:42.553946Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2025-07-08T11:58:42.574394Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-07-08T11:58:42.587194Z node 1 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:42.587300Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:42.588736Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:42.588890Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-07-08T11:58:42.588897Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-07-08T11:58:42.588905Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-07-08T11:58:42.588909Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-07-08T11:58:42.588917Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-07-08T11:58:42.588944Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-07-08T11:58:42.590766Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 
FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 
PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2025-07-08T11:58:42.615780Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:42.648628Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:42.648701Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-07-08T11:58:42.679314Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:42.679351Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:42.679428Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:05:00Z 2025-07-08T11:58:42.679723Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300027512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 300027512 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 300027512 } Timestamp: 300027512 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300027512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 300027512 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 300027512 } Timestamp: 300027512 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300027512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 300027512 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 300027512 } Timestamp: 300027512 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300027512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 300027512 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 300027512 } Timestamp: 300027512 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300027512 } Devices 
{ Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 300027512 } Devices { Name: "pdisk-5-5" State: UP Timestamp: 300027512 } Timestamp: 300027512 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 300027512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 300027512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 3000 ... on: You cannot get two or more disks from the same group at the same time in partial permissions allowed mode) 2025-07-08T11:58:46.519049Z node 17 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-1, owner# user 2025-07-08T11:58:46.519056Z node 17 :CMS INFO: Adding lock for Host ::1:12002 (18) (permission user-p-2 until 1970-01-01T00:06:00Z) 2025-07-08T11:58:46.519068Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:46.519099Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:06:00.131512Z, action# Type: SHUTDOWN_HOST Host: "18" Duration: 60000000 2025-07-08T11:58:46.519122Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 Issue { Type: GENERIC Message: "You cannot get two or more disks from the same group at the same time in partial permissions allowed mode" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2025-07-08T11:58:46.534649Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:46.534677Z node 17 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:46.534773Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_KEEP_AVAILABLE }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW_PARTIAL } RequestId: "user-r-1" Permissions { Id: "user-p-2" Action { Type: SHUTDOWN_HOST Host: "18" Duration: 60000000 } Deadline: 360131512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 18 InterconnectPort: 12002 } } } } 2025-07-08T11:58:46.546341Z node 17 :CMS INFO: Adding lock for Host ::1:12002 (18) (permission user-p-2 until 1970-01-01T00:06:00Z) 2025-07-08T11:58:46.546368Z node 17 :CMS INFO: Adding lock for Host ::1:12001 (17) (permission user-p-1 until 1970-01-01T00:06:00Z) 2025-07-08T11:58:46.546460Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:46.546529Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:05:00Z 2025-07-08T11:58:46.546710Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 Issue { Type: GENERIC Message: "You cannot get two or more disks from the same group at the same time in partial permissions allowed mode" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2025-07-08T11:58:46.546721Z node 17 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 Issue { Type: 
GENERIC Message: "You cannot get two or more disks from the same group at the same time in partial permissions allowed mode" } 2025-07-08T11:58:46.546733Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 19, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 2, down nodes: 0 2025-07-08T11:58:46.546766Z node 17 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Issue in affected group with id '0': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request, Host ::1:12001 (17) is restarting (permission user-p-1 owned by user), Host ::1:12002 (18) is restarting (permission user-p-2 owned by user). Down: ) 2025-07-08T11:58:46.546787Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:46.546826Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request, Host ::1:12001 (17) is restarting (permission user-p-1 owned by user), Host ::1:12002 (18) is restarting (permission user-p-2 owned by user). Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2025-07-08T11:58:46.558048Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:46.558079Z node 17 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:46.558147Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_KEEP_AVAILABLE }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request, Host ::1:12001 (17) is restarting (permission user-p-1 owned by user), Host ::1:12002 (18) is restarting (permission user-p-2 owned by user). Down: " } RequestId: "user-r-1" Deadline: 600233024 } 2025-07-08T11:58:46.558295Z node 17 :CMS INFO: User user is done with permissions user-p-1 2025-07-08T11:58:46.558308Z node 17 :CMS DEBUG: Resulting status: OK 2025-07-08T11:58:46.558323Z node 17 :CMS DEBUG: TTxRemovePermissions Execute 2025-07-08T11:58:46.558352Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove 2025-07-08T11:58:46.573972Z node 17 :CMS DEBUG: TTxRemovePermissions Complete 2025-07-08T11:58:46.574046Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2025-07-08T11:58:46.586242Z node 17 :CMS INFO: Adding lock for Host ::1:12002 (18) (permission user-p-2 until 1970-01-01T00:06:00Z) 2025-07-08T11:58:46.586349Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:46.586423Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:05:00Z 2025-07-08T11:58:46.586574Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. 
Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request, Host ::1:12001 (17) is restarting (permission user-p-1 owned by user), Host ::1:12002 (18) is restarting (permission user-p-2 owned by user). Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2025-07-08T11:58:46.586586Z node 17 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request, Host ::1:12001 (17) is restarting (permission user-p-1 owned by user), Host ::1:12002 (18) is restarting (permission user-p-2 owned by user). Down: " } 2025-07-08T11:58:46.586597Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 19, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 1 2025-07-08T11:58:46.586629Z node 17 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Issue in affected group with id '0': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request, Host ::1:12002 (18) is restarting (permission user-p-2 owned by user). Down: Host ::1:12001 (17) is down) 2025-07-08T11:58:46.586652Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:46.586688Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request, Host ::1:12002 (18) is restarting (permission user-p-2 owned by user). Down: Host ::1:12001 (17) is down" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2025-07-08T11:58:46.597947Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:46.597971Z node 17 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:46.598021Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_KEEP_AVAILABLE }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request, Host ::1:12002 (18) is restarting (permission user-p-2 owned by user). Down: Host ::1:12001 (17) is down" } RequestId: "user-r-1" Deadline: 600336048 } 2025-07-08T11:58:46.611901Z node 17 :CMS INFO: Adding lock for Host ::1:12002 (18) (permission user-p-2 until 1970-01-01T00:06:00Z) 2025-07-08T11:58:46.612013Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:46.612093Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:05:00Z 2025-07-08T11:58:46.612265Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request, Host ::1:12002 (18) is restarting (permission user-p-2 owned by user). 
Down: Host ::1:12001 (17) is down" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2025-07-08T11:58:46.612282Z node 17 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: VDisk [0:1:0:2:0] (::1:/pdisk.data) is locked by this request, Host ::1:12002 (18) is restarting (permission user-p-2 owned by user). Down: Host ::1:12001 (17) is down" } 2025-07-08T11:58:46.612352Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 19, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-07-08T11:58:46.612404Z node 17 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:46.612437Z node 17 :CMS DEBUG: Accepting permission: id# user-p-3, requestId# user-r-1, owner# user 2025-07-08T11:58:46.612445Z node 17 :CMS INFO: Adding lock for Host ::1:12003 (19) (permission user-p-3 until 1970-01-01T00:06:00Z) 2025-07-08T11:58:46.612458Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:46.612488Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-3, validity# 1970-01-01T00:06:00.437560Z, action# Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 2025-07-08T11:58:46.612499Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-1, owner# user 2025-07-08T11:58:46.623539Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:46.623563Z node 17 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:46.623647Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_KEEP_AVAILABLE }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "user-p-3" Action { Type: SHUTDOWN_HOST Host: "19" Duration: 60000000 } Deadline: 360437560 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 19 InterconnectPort: 12003 } } } } >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartModeScheduled [GOOD] >> TCmsTest::ActionIssue >> TCmsTest::VDisksEvictionShouldFailOnUnsupportedAction [GOOD] >> TSequence::AlterSequence [GOOD] >> TSequence::AlterTableSetDefaultFromSequence |64.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy [GOOD] Test command err: 2025-07-08T11:58:40.987592Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:40.991046Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:40.991129Z node 1 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:40.991565Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:40.991657Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2025-07-08T11:58:40.992070Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:40.994336Z node 1 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:40.994465Z node 1 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:40.994510Z node 1 :CMS 
DEBUG: Using default config 2025-07-08T11:58:40.994537Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:40.994581Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2025-07-08T11:58:40.997189Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2025-07-08T11:58:40.997238Z node 1 :CMS DEBUG: Using default config. 2025-07-08T11:58:40.997309Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2025-07-08T11:58:41.014219Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-07-08T11:58:41.024877Z node 1 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:41.024933Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:41.026347Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:41.026442Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-07-08T11:58:41.026446Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-07-08T11:58:41.026452Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-07-08T11:58:41.026454Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-07-08T11:58:41.026460Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-07-08T11:58:41.026477Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-07-08T11:58:41.027472Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { } } Success: true 2025-07-08T11:58:41.050931Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:41.084177Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:41.084248Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-07-08T11:58:41.084527Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvGetConfigRequest { }, response# NKikimr::NCms::TEvCms::TEvGetConfigResponse { Status { Code: OK } Config { DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } } } 2025-07-08T11:58:41.084572Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:41.137822Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:41.137936Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:41.138090Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120027512 } Timestamp: 120027512 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" 
Timestamp: 120027512 } Timestamp: 120027512 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120027512 } Timestamp: 120027512 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120027512 } Timestamp: 120027512 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120027512 } Timestamp: 120027512 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120027512 } Timestamp: 120027512 NodeId: 6 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120027512 } Timestamp: 120027512 NodeId: 7 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120027512 } Timestamp: 120027512 NodeId: 8 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120027512 } } 2025-07-08T11:58:41.161155Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:41.220643Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:41.220735Z node 1 :CMS DEBUG: Updated config: DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 10 } ClusterLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } 2025-07-08T11:58:41.220761Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:41.262142Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:41.262182Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:41.262199Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:41.262251Z node 1 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false TenantPolicy: NONE AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-07-08T11:58:41.262259Z node 1 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 2025-07-08T11:58:41.262271Z node 1 :CMS DEBUG: [Nodes Counter] Checking Node: 1, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-07-08T11:58:41.262279Z node 1 :CMS DEBUG: Ring: 0; State: Ok 2025-07-08T11:58:41.262283Z node 1 :CMS DEBUG: Ring: 1; State: Ok 2025-07-08T11:58:41.262286Z node 1 :CMS DEBUG: Ring: 2; State: Ok 2025-07-08T11:58:41.262290Z node 1 
:CMS DEBUG: Result: ALLOW 2025-07-08T11:58:41.262306Z node 1 :CMS DEBUG: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2025-07-08T11:58:41.262315Z node 1 :CMS INFO: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:41.262324Z node 1 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:41.262359Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:03:00.128512Z, action# Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 2025-07-08T11:58:41.277149Z node 1 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:41.277263Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false TenantPolicy: NONE AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } Deadline: 180128512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 1 InterconnectPort: 12001 } } } } 2025-07-08T11:58:41.277275Z node 1 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:05:00.128512Z 2025-07-08T11:58:41.289994Z node 1 :CMS INFO: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:41.290050Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:41.290069Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:41.290080Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:41.290129Z node 1 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false TenantPolicy: NONE AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-07-08T11:58:41.290137Z node 1 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 2025-07-08T11:58:41.290147Z node 1 :CMS DEBUG: [Nodes Counter] Checking Node: 2, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-07-08T11:58:41.290156Z node 1 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:41.290172Z node 1 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user 2025-07-08T11:58:41.290179Z node 1 :CMS INFO: Adding lock for Host ::1:12002 (2) (permission user-p-2 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:41.290187Z node 1 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:41.290223Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:03:00.230024Z, action# Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 2025-07-08T11:58:41.301913Z node 1 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:41.301998Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false TenantPolicy: NONE AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-2" Permissions { Id: "user-p-2" Action { Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 } Deadline: 180230024 Extentions { Type: HostInfo Hosts { Name: 
"::1" State: UP NodeId: 2 InterconnectPort: 12002 } } } } 2025-07-08T11:58:41.313942Z node 1 :CMS INFO: Adding lock for Host ::1:12002 (2) (permission user-p-2 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:41.313966Z node 1 :CMS INFO: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:41.314005Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:41.314025Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:41.314037Z node ... ock for Host ::1:12004 (28) (permission user-p-4 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:46.897024Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:46.897060Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-3, validity# 1970-01-01T00:03:00.336048Z, action# Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 2025-07-08T11:58:46.897068Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-4, validity# 1970-01-01T00:03:00.336048Z, action# Type: SHUTDOWN_HOST Host: "28" Duration: 60000000 2025-07-08T11:58:46.897104Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: SHUTDOWN_HOST Host: "29" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'29\' of tenant \'user0\': too many unavailable nodes. Locked: 2, down: 0, total: 8, limit: 30%" } } Actions { Type: SHUTDOWN_HOST Host: "30" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'30\' of tenant \'user0\': too many unavailable nodes. Locked: 2, down: 0, total: 8, limit: 30%" } } Actions { Type: SHUTDOWN_HOST Host: "31" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'31\' of tenant \'user0\': too many unavailable nodes. Locked: 2, down: 0, total: 8, limit: 30%" } } Actions { Type: SHUTDOWN_HOST Host: "32" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'32\' of tenant \'user0\': too many unavailable nodes. 
Locked: 2, down: 0, total: 8, limit: 30%" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-07-08T11:58:46.909290Z node 25 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:46.909385Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW_PARTIAL } RequestId: "user-r-1" Permissions { Id: "user-p-3" Action { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 } Deadline: 180336048 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 27 InterconnectPort: 12003 } } } Permissions { Id: "user-p-4" Action { Type: SHUTDOWN_HOST Host: "28" Duration: 60000000 } Deadline: 180336048 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 28 InterconnectPort: 12004 } } } } 2025-07-08T11:58:46.909545Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvGetConfigRequest { }, response# NKikimr::NCms::TEvCms::TEvGetConfigResponse { Status { Code: OK } Config { DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 30 } ClusterLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } } } 2025-07-08T11:58:46.909600Z node 25 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:46.920363Z node 25 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:46.920436Z node 25 :CMS DEBUG: Updated config: DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } 2025-07-08T11:58:46.931731Z node 25 :CMS INFO: Adding lock for Host ::1:12003 (27) (permission user-p-3 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:46.931757Z node 25 :CMS INFO: Adding lock for Host ::1:12004 (28) (permission user-p-4 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:46.931803Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:46.931821Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:46.931832Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:46.931901Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "29" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'29\' of tenant \'user0\': too many unavailable nodes. 
Locked: 2, down: 0, total: 8, limit: 30%" } } Actions { Type: SHUTDOWN_HOST Host: "30" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'30\' of tenant \'user0\': too many unavailable nodes. Locked: 2, down: 0, total: 8, limit: 30%" } } Actions { Type: SHUTDOWN_HOST Host: "31" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'31\' of tenant \'user0\': too many unavailable nodes. Locked: 2, down: 0, total: 8, limit: 30%" } } Actions { Type: SHUTDOWN_HOST Host: "32" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'32\' of tenant \'user0\': too many unavailable nodes. Locked: 2, down: 0, total: 8, limit: 30%" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-07-08T11:58:46.931916Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "29" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'29\' of tenant \'user0\': too many unavailable nodes. Locked: 2, down: 0, total: 8, limit: 30%" } 2025-07-08T11:58:46.931926Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 29, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 2, down nodes: 0 2025-07-08T11:58:46.931931Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 29, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 2, down nodes: 0 2025-07-08T11:58:46.931936Z node 25 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:46.931950Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "30" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'30\' of tenant \'user0\': too many unavailable nodes. Locked: 2, down: 0, total: 8, limit: 30%" } 2025-07-08T11:58:46.931954Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 30, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 3, down nodes: 0 2025-07-08T11:58:46.931958Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 30, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 3, down nodes: 0 2025-07-08T11:58:46.931962Z node 25 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:46.931971Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "31" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'31\' of tenant \'user0\': too many unavailable nodes. Locked: 2, down: 0, total: 8, limit: 30%" } 2025-07-08T11:58:46.931975Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 31, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 4, down nodes: 0 2025-07-08T11:58:46.931979Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 31, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 4, down nodes: 0 2025-07-08T11:58:46.931983Z node 25 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:46.931992Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "32" Duration: 60000000 Issue { Type: TENANT_DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'32\' of tenant \'user0\': too many unavailable nodes. 
Locked: 2, down: 0, total: 8, limit: 30%" } 2025-07-08T11:58:46.931996Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 32, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 5, down nodes: 0 2025-07-08T11:58:46.932000Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 32, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 5, down nodes: 0 2025-07-08T11:58:46.932004Z node 25 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:46.932022Z node 25 :CMS DEBUG: Accepting permission: id# user-p-5, requestId# user-r-1, owner# user 2025-07-08T11:58:46.932028Z node 25 :CMS INFO: Adding lock for Host ::1:12005 (29) (permission user-p-5 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:46.932035Z node 25 :CMS DEBUG: Accepting permission: id# user-p-6, requestId# user-r-1, owner# user 2025-07-08T11:58:46.932041Z node 25 :CMS INFO: Adding lock for Host ::1:12006 (30) (permission user-p-6 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:46.932046Z node 25 :CMS DEBUG: Accepting permission: id# user-p-7, requestId# user-r-1, owner# user 2025-07-08T11:58:46.932051Z node 25 :CMS INFO: Adding lock for Host ::1:12007 (31) (permission user-p-7 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:46.932055Z node 25 :CMS DEBUG: Accepting permission: id# user-p-8, requestId# user-r-1, owner# user 2025-07-08T11:58:46.932060Z node 25 :CMS INFO: Adding lock for Host ::1:12008 (32) (permission user-p-8 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:46.932070Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:46.932106Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-5, validity# 1970-01-01T00:03:00.439072Z, action# Type: SHUTDOWN_HOST Host: "29" Duration: 60000000 2025-07-08T11:58:46.932115Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-6, validity# 1970-01-01T00:03:00.439072Z, action# Type: SHUTDOWN_HOST Host: "30" Duration: 60000000 2025-07-08T11:58:46.932123Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-7, validity# 1970-01-01T00:03:00.439072Z, action# Type: SHUTDOWN_HOST Host: "31" Duration: 60000000 2025-07-08T11:58:46.932131Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-8, validity# 1970-01-01T00:03:00.439072Z, action# Type: SHUTDOWN_HOST Host: "32" Duration: 60000000 2025-07-08T11:58:46.932139Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-1, owner# user 2025-07-08T11:58:46.945245Z node 25 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:46.945362Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "user-p-5" Action { Type: SHUTDOWN_HOST Host: "29" Duration: 60000000 } Deadline: 180439072 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 29 InterconnectPort: 12005 } } } Permissions { Id: "user-p-6" Action { Type: SHUTDOWN_HOST Host: "30" Duration: 60000000 } Deadline: 180439072 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 30 InterconnectPort: 12006 } } } Permissions { Id: "user-p-7" Action { Type: SHUTDOWN_HOST Host: "31" Duration: 60000000 } Deadline: 180439072 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 31 InterconnectPort: 12007 } } } Permissions { Id: "user-p-8" Action { Type: SHUTDOWN_HOST Host: "32" Duration: 60000000 } Deadline: 180439072 
Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 32 InterconnectPort: 12008 } } } } >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] |64.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::ScheduledWalleRequestDuringRollingRestart [GOOD] Test command err: 2025-07-08T11:58:42.499027Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2025-07-08T11:58:42.499957Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:42.501895Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:42.501951Z node 1 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:42.502340Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2025-07-08T11:58:42.502358Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:42.502467Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2025-07-08T11:58:42.502515Z node 1 :CMS DEBUG: Using default config. 2025-07-08T11:58:42.502590Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2025-07-08T11:58:42.502604Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:42.504112Z node 1 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:42.504152Z node 1 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:42.504176Z node 1 :CMS DEBUG: Using default config 2025-07-08T11:58:42.504199Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:42.523700Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-07-08T11:58:42.547978Z node 1 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:42.548064Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:42.549519Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:42.549625Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-07-08T11:58:42.549631Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-07-08T11:58:42.549639Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-07-08T11:58:42.549643Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-07-08T11:58:42.549655Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:42.549720Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-07-08T11:58:42.549742Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-07-08T11:58:42.551808Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 
PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 
VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2025-07-08T11:58:42.590377Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:42.590444Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-07-08T11:58:42.628902Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:42.628988Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:42.629128Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:42.629464Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-1-1" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120027512 } 
Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120027512 } Devices { Name: "pdisk-5-5" State: UP Timestamp: 120027512 } Timestamp: 120027512 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120027512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 1200 ... t: "18" Duration: 18446744073709551615 } Schedule: true DryRun: false Priority: 20 2025-07-08T11:58:46.524910Z node 17 :CMS DEBUG: Checking action: Type: REBOOT_HOST Host: "18" Duration: 18446744073709551615 2025-07-08T11:58:46.524916Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 18, with state: Locked, with limit: 0, with ratio limit: 0, locked nodes: 2, down nodes: 0 2025-07-08T11:58:46.524922Z node 17 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '18': node state: 'Locked') 2025-07-08T11:58:46.524937Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:46.527499Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# Wall-E-r-2, owner# Wall-E, order# 2, priority# 20, body# User: "Wall-E" Actions { Type: REBOOT_HOST Host: "18" Duration: 18446744073709551615 Issue { Type: GENERIC Message: "Cannot lock node \'18\': node state: \'Locked\'" } } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: 20 2025-07-08T11:58:46.546249Z node 17 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:46.546341Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "Wall-E" Actions { Type: REBOOT_HOST Host: "18" Duration: 18446744073709551615 } Schedule: true DryRun: false Priority: 20 }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'18\': node state: \'Locked\'" } RequestId: "Wall-E-r-2" Deadline: 420229024 } 2025-07-08T11:58:46.546414Z node 17 :CMS DEBUG: TTxStoreWalleTask Execute 2025-07-08T11:58:46.546447Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store wall-e task: id# task-1, requestId# 
Wall-E-r-2 2025-07-08T11:58:46.558602Z node 17 :CMS DEBUG: TTxStoreWalleTask Complete 2025-07-08T11:58:46.558639Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvStoreWalleTask { Task: { TaskId: task-1 RequestId: Wall-E-r-2 Owner: Permissions: [] HasSingleCompositeActionGroup: 0 } }, response# NKikimr::NCms::TEvCms::TEvWalleTaskStored { TaskId: task-1 } 2025-07-08T11:58:46.558703Z node 17 :CMS NOTICE: [AuditLog] [Wall-E adapter] Reply: request# NKikimr::NCms::TEvCms::TEvWalleCreateTaskRequest { TaskId: "task-1" Type: "automated" Issuer: "UT" Action: "reboot" Hosts: "18" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvWalleCreateTaskResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'18\': node state: \'Locked\'" } TaskId: "task-1" Hosts: "18" } 2025-07-08T11:58:46.558796Z node 17 :CMS INFO: User user is done with permissions user-p-1 2025-07-08T11:58:46.558805Z node 17 :CMS DEBUG: Resulting status: OK 2025-07-08T11:58:46.558814Z node 17 :CMS DEBUG: TTxRemovePermissions Execute 2025-07-08T11:58:46.558833Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove 2025-07-08T11:58:46.570962Z node 17 :CMS DEBUG: TTxRemovePermissions Complete 2025-07-08T11:58:46.571041Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2025-07-08T11:58:46.571219Z node 17 :CMS INFO: Processing Wall-E request: TaskId: "task-1" 2025-07-08T11:58:46.583265Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:46.583316Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:46.583337Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:46.583512Z node 17 :CMS INFO: Check request: User: "Wall-E" Actions { Type: REBOOT_HOST Host: "18" Duration: 18446744073709551615 Issue { Type: GENERIC Message: "Cannot lock node \'18\': node state: \'Locked\'" } } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: 20 2025-07-08T11:58:46.583525Z node 17 :CMS DEBUG: Checking action: Type: REBOOT_HOST Host: "18" Duration: 18446744073709551615 Issue { Type: GENERIC Message: "Cannot lock node \'18\': node state: \'Locked\'" } 2025-07-08T11:58:46.583536Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 18, with state: Locked, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-07-08T11:58:46.583544Z node 17 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '18': node state: 'Locked') 2025-07-08T11:58:46.583571Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:46.583626Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# Wall-E-r-2, owner# Wall-E, order# 2, priority# 20, body# User: "Wall-E" Actions { Type: REBOOT_HOST Host: "18" Duration: 18446744073709551615 Issue { Type: GENERIC Message: "Cannot lock node \'18\': node state: \'Locked\'" } } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: 20 2025-07-08T11:58:46.597478Z node 17 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:46.597563Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "Wall-E" RequestId: "Wall-E-r-2" 
}, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'18\': node state: \'Locked\'" } RequestId: "Wall-E-r-2" Deadline: 420333560 } 2025-07-08T11:58:46.597625Z node 17 :CMS NOTICE: [AuditLog] [Wall-E adapter] Reply: request# NKikimr::NCms::TEvCms::TEvWalleCheckTaskRequest { TaskId: "task-1" }, response# NKikimr::NCms::TEvCms::TEvWalleCheckTaskResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'18\': node state: \'Locked\'" } Task { TaskId: "task-1" Hosts: "18" } } 2025-07-08T11:58:46.609395Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:46.609439Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:46.609456Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:46.609632Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: -80 2025-07-08T11:58:46.609645Z node 17 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (17) has temporary lock, VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } 2025-07-08T11:58:46.609659Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 18, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-07-08T11:58:46.609696Z node 17 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:46.609720Z node 17 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-1, owner# user 2025-07-08T11:58:46.609728Z node 17 :CMS INFO: Adding lock for Host ::1:12002 (18) (permission user-p-2 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:46.609739Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:46.609785Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:03:00.435072Z, action# Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 2025-07-08T11:58:46.609794Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-1, owner# user 2025-07-08T11:58:46.624597Z node 17 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:46.624678Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "user-p-2" Action { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 60000000 } Deadline: 180435072 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 18 InterconnectPort: 12002 } } } } 2025-07-08T11:58:46.624827Z node 17 :CMS INFO: User user is done with permissions user-p-2 2025-07-08T11:58:46.624857Z node 17 :CMS DEBUG: Resulting status: OK 2025-07-08T11:58:46.624872Z node 17 :CMS DEBUG: TTxRemovePermissions Execute 2025-07-08T11:58:46.624901Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# 
user-p-2, reason# explicit remove 2025-07-08T11:58:46.635925Z node 17 :CMS DEBUG: TTxRemovePermissions Complete 2025-07-08T11:58:46.636009Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2025-07-08T11:58:46.636181Z node 17 :CMS INFO: Processing Wall-E request: TaskId: "task-1" 2025-07-08T11:58:46.648214Z node 17 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:46.648259Z node 17 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:46.648278Z node 17 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:46.648432Z node 17 :CMS INFO: Check request: User: "Wall-E" Actions { Type: REBOOT_HOST Host: "18" Duration: 18446744073709551615 Issue { Type: GENERIC Message: "Cannot lock node \'18\': node state: \'Locked\'" } } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false Priority: 20 2025-07-08T11:58:46.648443Z node 17 :CMS DEBUG: Checking action: Type: REBOOT_HOST Host: "18" Duration: 18446744073709551615 Issue { Type: GENERIC Message: "Cannot lock node \'18\': node state: \'Locked\'" } 2025-07-08T11:58:46.648455Z node 17 :CMS DEBUG: [Nodes Counter] Checking Node: 18, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-07-08T11:58:46.648493Z node 17 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:46.648514Z node 17 :CMS DEBUG: Accepting permission: id# Wall-E-p-3, requestId# Wall-E-r-2, owner# Wall-E 2025-07-08T11:58:46.648522Z node 17 :CMS INFO: Adding lock for Host ::1:12002 (18) (permission Wall-E-p-3 until 586524-01-19T08:01:49Z) 2025-07-08T11:58:46.648535Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:46.648580Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# Wall-E-p-3, validity# 586524-01-19T08:01:49.551615Z, action# Type: REBOOT_HOST Host: "18" Duration: 18446744073709551615 2025-07-08T11:58:46.648589Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# Wall-E-r-2, owner# Wall-E 2025-07-08T11:58:46.667412Z node 17 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:46.667522Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "Wall-E" RequestId: "Wall-E-r-2" }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "Wall-E-p-3" Action { Type: REBOOT_HOST Host: "18" Duration: 18446744073709551615 } Deadline: 18446744073709551615 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 18 InterconnectPort: 12002 } } } } 2025-07-08T11:58:46.667577Z node 17 :CMS NOTICE: [AuditLog] [Wall-E adapter] Reply: request# NKikimr::NCms::TEvCms::TEvWalleCheckTaskRequest { TaskId: "task-1" }, response# NKikimr::NCms::TEvCms::TEvWalleCheckTaskResponse { Status { Code: ALLOW } Task { TaskId: "task-1" Hosts: "18" } } |64.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::VDisksEvictionShouldFailOnUnsupportedAction [GOOD] Test command err: 2025-07-08T11:58:42.499792Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:42.505555Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: 
NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:42.505655Z node 1 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:42.506170Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:42.506287Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2025-07-08T11:58:42.506748Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:42.512803Z node 1 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:42.513016Z node 1 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:42.513076Z node 1 :CMS DEBUG: Using default config 2025-07-08T11:58:42.513107Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:42.513188Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2025-07-08T11:58:42.513456Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2025-07-08T11:58:42.513521Z node 1 :CMS DEBUG: Using default config. 2025-07-08T11:58:42.513605Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2025-07-08T11:58:42.535010Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-07-08T11:58:42.546379Z node 1 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:42.546477Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:42.548044Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:42.548190Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-07-08T11:58:42.548198Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-07-08T11:58:42.548208Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-07-08T11:58:42.548212Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-07-08T11:58:42.548221Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-07-08T11:58:42.548251Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-07-08T11:58:42.550251Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 1 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 2 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 3 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 4 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 5 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 6 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 7 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 8 Path: "/pdisk.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } 
VSlot { VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 4 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 5 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 6 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 7 } VSlot { VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 7 } Group { GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1000 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1000 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1000 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1000 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1000 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1000 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1000 } } Group { GroupId: 1 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1001 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1001 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1001 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1001 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1001 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1001 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1001 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1001 } } Group { GroupId: 2 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1002 } VSlotId { NodeId: 2 PDiskId: 2 
VSlotId: 1002 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1002 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1002 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1002 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1002 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1002 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1002 } } Group { GroupId: 3 GroupGeneration: 1 ErasureSpecies: "block-4-2" VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1003 } VSlotId { NodeId: 2 PDiskId: 2 VSlotId: 1003 } VSlotId { NodeId: 3 PDiskId: 3 VSlotId: 1003 } VSlotId { NodeId: 4 PDiskId: 4 VSlotId: 1003 } VSlotId { NodeId: 5 PDiskId: 5 VSlotId: 1003 } VSlotId { NodeId: 6 PDiskId: 6 VSlotId: 1003 } VSlotId { NodeId: 7 PDiskId: 7 VSlotId: 1003 } VSlotId { NodeId: 8 PDiskId: 8 VSlotId: 1003 } } } } Success: true 2025-07-08T11:58:42.576469Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:42.610699Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:42.610756Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-07-08T11:58:42.611016Z node 1 :CMS INFO: OnTabletDead: 72057594037936128 2025-07-08T11:58:42.611025Z node 1 :CMS DEBUG: TCms::Cleanup 2025-07-08T11:58:42.612325Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:42.612963Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:42.613016Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:42.613297Z node 1 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:42.613395Z node 1 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:42.614430Z node 1 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:42.614569Z node 1 :CMS DEBUG: Loaded config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-07-08T11:58:42.614590Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:42.614670Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:42.614717Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 } 2025-07-08T11:58:42.704750Z node 1 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:42.767708Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:42.767755Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:42.767822Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:42.767976Z node 1 :CMS INFO: Check request: User: "Wall-E" Actions { Type: RESTART_SERVICES Host: "4" Services: "storage" Duration: 6000000000 } PartialPermissionAllowed: true Schedule: false DryRun: false 2025-07-08T11:58:42.767986Z node 1 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "4" Services: "storage" Duration: 6000000000 2025-07-08T11:58:42.768001Z node 1 :CMS DEBUG: [Nodes Counter] Checking Node: 4, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-07-08T11:58:42.768038Z node 1 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:42.768057Z node 1 :CMS DEBUG: Accepting permission: id# Wall-E-p-1, requestId# Wall-E-r-1, owner# Wall-E 2025-07-08T11:58:42.768066Z node 1 :CMS INFO: Adding lock for 
Host ::1:12004 (4) (permission Wall-E-p-1 until 1970-01-01T01:42:00Z) 2025-07-08T11:58:42.768079Z node 1 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:42.768123Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# Wall-E-p-1, validity# 1970-01-01T01:42:00.133512Z, action# Type: RESTART_SERVICES Host: "4" Services: "storage" Duration: 6000000000 2025-07-08T11:58:42.768131Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# Wall-E-r-1, owner# Wall-E 2025-07-08T11:58:42.779600Z node 1 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:42.779724Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "Wall-E" Actions { Type: RESTART_SERVICES Host: "4" Services: "storage" Duration: 6000000000 } PartialPermissionAllowed: true Schedule: false DryRun: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "Wall-E-r-1" Permissions { Id: "Wall-E-p-1" Action { Type: RESTART_SERVICES Host: "4" Services: "storage" Duration: 6000000000 } Deadline: 6120133512 Extentions { Type: HostInfo Hosts { ... tate: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 22 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 23 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 24 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120029000 } } 2025-07-08T11:58:46.819960Z node 17 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-17-17" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 17 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } 
StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-18-18" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 18 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 19 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-20-20" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 20 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 21 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 22 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 23 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" 
Timestamp: 120029000 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120029000 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120029000 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 120029000 } Timestamp: 120029000 NodeId: 24 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120029000 } 2025-07-08T11:58:46.820004Z node 17 :CMS DEBUG: [Sentinel] [Main] Config was updated in 120.003000s 2025-07-08T11:58:46.820014Z node 17 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-07-08T11:58:46.820048Z node 17 :CMS INFO: Check request: User: "user" Actions { Type: REPLACE_DEVICES Host: "17" Devices: "pdisk-17-17" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2025-07-08T11:58:46.820057Z node 17 :CMS DEBUG: Checking action: Type: REPLACE_DEVICES Host: "17" Devices: "pdisk-17-17" Duration: 60000000 2025-07-08T11:58:46.820065Z node 17 :CMS DEBUG: Result: WRONG_REQUEST (reason: Unable to evict vdisks to perform action: REPLACE_DEVICES) 2025-07-08T11:58:46.820083Z node 17 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:46.820123Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-07-08T11:58:46.820169Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 17, wbId# [17:8388350642965737326:1634689637] 2025-07-08T11:58:46.820176Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 18, wbId# [18:8388350642965737326:1634689637] 2025-07-08T11:58:46.820180Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 19, wbId# [19:8388350642965737326:1634689637] 2025-07-08T11:58:46.820185Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 20, wbId# [20:8388350642965737326:1634689637] 2025-07-08T11:58:46.820189Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 21, wbId# [21:8388350642965737326:1634689637] 2025-07-08T11:58:46.820193Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 22, wbId# [22:8388350642965737326:1634689637] 2025-07-08T11:58:46.820198Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 23, wbId# [23:8388350642965737326:1634689637] 2025-07-08T11:58:46.820203Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 24, wbId# [24:8388350642965737326:1634689637] 2025-07-08T11:58:46.821427Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 17, response# PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120029 2025-07-08T11:58:46.821569Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 22, response# PDiskStateInfo { PDiskId: 22 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120029 2025-07-08T11:58:46.821593Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 23, 
response# PDiskStateInfo { PDiskId: 23 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120029 2025-07-08T11:58:46.821611Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 24, response# PDiskStateInfo { PDiskId: 24 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120029 2025-07-08T11:58:46.821625Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 20, response# PDiskStateInfo { PDiskId: 20 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120029 2025-07-08T11:58:46.821637Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 21, response# PDiskStateInfo { PDiskId: 21 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120029 2025-07-08T11:58:46.821649Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 18, response# PDiskStateInfo { PDiskId: 18 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120029 2025-07-08T11:58:46.821660Z node 17 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 19, response# PDiskStateInfo { PDiskId: 19 CreateTime: 0 ChangeTime: 0 Path: "/pdisk.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120029 2025-07-08T11:58:46.821671Z node 17 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-07-08T11:58:46.869112Z node 17 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:46.921386Z node 17 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:46.921465Z node 17 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: REPLACE_DEVICES Host: "17" Devices: "pdisk-17-17" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: WRONG_REQUEST Reason: "Unable to evict vdisks to perform action: REPLACE_DEVICES" } RequestId: "user-r-1" } |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled [GOOD] |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |64.3%| [TA] $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 12396, MsgBus: 7705 2025-07-08T11:58:17.522547Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679183490776888:2155];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001951/r3tmp/tmpdzM28x/pdisk_1.dat 2025-07-08T11:58:17.554738Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T11:58:17.576533Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12396, node 1 2025-07-08T11:58:17.605432Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:58:17.605444Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:58:17.605447Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:58:17.605481Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7705 2025-07-08T11:58:17.653567Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:17.653591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:17.661543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7705 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:58:17.725728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:17.729097Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:58:17.742130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:58:17.767054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:17.789461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:17.810019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:17.932195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:58:17.953191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:58:17.972978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T11:58:17.980738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T11:58:17.995920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T11:58:18.010627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T11:58:18.025363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T11:58:18.190878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T11:58:18.359499Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jzmydb792q17fd9qjd0f466z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODczZGJkZjktM2E5NmEzMDctMWNhMzg1OTQtYWZiYWQzNjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.359552Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jzmydb7997vpdch2nsm9zqk4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGE3YWQyMTEtMzBiNGEyY2MtYzVlMzAwNC0zNjdlN2U4Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.359727Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jzmydb79099km6dc1awdgbgn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTY3N2RmNTEtOTQ1ZDA2MzktOWNhZDJiYTAtNDBmMmFmYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T11:58:18.359730Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jzmydb7e6kvyky93vfzsx7kd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2QzNTRiODEtZTYzZmQ4ZDktZmJmZTMxZjktYTQxOGI4NmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.359852Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jzmydb795x1qx75vz8fswh3m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWRmNTgwY2UtY2Y4YmFhNTctZWViN2MyMDgtMzAzYWFjMzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.359968Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jzmydb79cf6axjdbced44y40, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzc0ZWJmYmQtNTFiMmFhMDEtOGI4MmYzZWYtMThkNGI1Nzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.360191Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jzmydb7eac545jnyhcebxbnb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDM1MjdhYjQtN2NjYjk5NTYtNzJlZjRlZWYtZmVmOTdhNTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.360361Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jzmydb792efqtdm6d2abe8v5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmJhNWViYTUtYTVjN2IzYjAtNWYxZTNhZmEtMThlNzg4MmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.360763Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jzmydb799005s9q049jysk1a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzExMDVjNC03ZjZiM2EwNi00MGRiY2VlOS02ZWQ5MDBlNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.360795Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jzmydb79d4r19c7rt299f13s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTIzNzhkYzgtYmIxNjQ2ZTMtYmIxNTQ2MTQtMjI3MDA4Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.364709Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jzmydb7997vpdch2nsm9zqk4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGE3YWQyMTEtMzBiNGEyY2MtYzVlMzAwNC0zNjdlN2U4Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.365269Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jzmydb79d4r19c7rt299f13s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTIzNzhkYzgtYmIxNjQ2ZTMtYmIxNTQ2MTQtMjI3MDA4Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.365271Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jzmydb79cf6axjdbced44y40, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzc0ZWJmYmQtNTFiMmFhMDEtOGI4MmYzZWYtMThkNGI1Nzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.365413Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. 
Ctx: { TraceId: 01jzmydb7e6kvyky93vfzsx7kd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2QzNTRiODEtZTYzZmQ4ZDktZmJmZTMxZjktYTQxOGI4NmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.366118Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715688. Ctx: { TraceId: 01jzmydb79099km6dc1awdgbgn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTY3N2RmNTEtOTQ1ZDA2MzktOWNhZDJiYTAtNDBmMmFmYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.366227Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715686. Ctx: { TraceId: 01jzmydb799005s9q049jysk1a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzExMDVjNC03ZjZiM2EwNi00MGRiY2VlOS02ZWQ5MDBlNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.366384Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jzmydb795x1qx75vz8fswh3m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWRmNTgwY2UtY2Y4YmFhNTctZWViN2MyMDgtMzAzYWFjMzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.366558Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715689. Ctx: { TraceId: 01jzmydb7eac545jnyhcebxbnb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDM1MjdhYjQtN2NjYjk5NTYtNzJlZjRlZWYtZmVmOTdhNTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.366681Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715690. Ctx: { TraceId: 01jzmydb792q17fd9qjd0f466z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODczZGJkZjktM2E5NmEzMDctMWNhM ... ntExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:47.373611Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721478. Ctx: { TraceId: 01jzmye7j943mr430hpkn2bqyx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZTVjMDBmNGEtODczZjZkZTUtOGM3ZTRjZGUtNjEzMThiZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:47.374531Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721479. Ctx: { TraceId: 01jzmye7jadvr7w315xsffzqe1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ODkzMDEzMWEtNzMxMTdhYTUtNjI4ZWQ2ZTItYTI3NzA0YTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:47.374648Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721480. Ctx: { TraceId: 01jzmye7j943mr430hpkn2bqyx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZTVjMDBmNGEtODczZjZkZTUtOGM3ZTRjZGUtNjEzMThiZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:47.375191Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721481. Ctx: { TraceId: 01jzmye7j9ec7sz92wqetgech8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTQxNWIxYzEtMzlhMGFkYTctMWMyMTYyOTAtZDBmNGYxN2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:47.376829Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721482. Ctx: { TraceId: 01jzmye7jadvr7w315xsffzqe1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ODkzMDEzMWEtNzMxMTdhYTUtNjI4ZWQ2ZTItYTI3NzA0YTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T11:58:47.377855Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721483. Ctx: { TraceId: 01jzmye7jadvr7w315xsffzqe1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ODkzMDEzMWEtNzMxMTdhYTUtNjI4ZWQ2ZTItYTI3NzA0YTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:47.379251Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721484. Ctx: { TraceId: 01jzmye7jha7n96dkj4q552k8h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzhkYjYxOWYtOTk0NzQzMTktYTJkYWIwZDgtMTE4Y2M4Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:47.382363Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721486. Ctx: { TraceId: 01jzmye7jk1s958ys4zjx5yppt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjcxMzlmNTEtOGFlMDg0ZTYtN2M2NDVmNWMtNTM0NGYyODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:47.382429Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721485. Ctx: { TraceId: 01jzmye7jha7n96dkj4q552k8h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzhkYjYxOWYtOTk0NzQzMTktYTJkYWIwZDgtMTE4Y2M4Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:47.382808Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721487. Ctx: { TraceId: 01jzmye7jkfynae1d84az4begz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OWJiMjkzNzUtNDcyNGU1NzQtMjQ3NjU5MjEtNmE5ZDM1N2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:47.383617Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721488. Ctx: { TraceId: 01jzmye7jha7n96dkj4q552k8h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzhkYjYxOWYtOTk0NzQzMTktYTJkYWIwZDgtMTE4Y2M4Njg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:47.383650Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721489. Ctx: { TraceId: 01jzmye7jk1s958ys4zjx5yppt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjcxMzlmNTEtOGFlMDg0ZTYtN2M2NDVmNWMtNTM0NGYyODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:47.384036Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721490. Ctx: { TraceId: 01jzmye7jkfynae1d84az4begz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OWJiMjkzNzUtNDcyNGU1NzQtMjQ3NjU5MjEtNmE5ZDM1N2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:47.384405Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721491. Ctx: { TraceId: 01jzmye7jk1s958ys4zjx5yppt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjcxMzlmNTEtOGFlMDg0ZTYtN2M2NDVmNWMtNTM0NGYyODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:47.384745Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721493. Ctx: { TraceId: 01jzmye7jkfynae1d84az4begz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OWJiMjkzNzUtNDcyNGU1NzQtMjQ3NjU5MjEtNmE5ZDM1N2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:47.384777Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721492. 
Ctx: { TraceId: 01jzmye7jp6n7vteycxwdzyme1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Mzk1MjZmMTQtMjgyNTI5YTAtMjkwODY1OGEtMTI1MmYwMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:47.386735Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721494. Ctx: { TraceId: 01jzmye7jq8k396ek6jcwc9zne, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTQxNWIxYzEtMzlhMGFkYTctMWMyMTYyOTAtZDBmNGYxN2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:47.387756Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721495. Ctx: { TraceId: 01jzmye7jq8k396ek6jcwc9zne, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTQxNWIxYzEtMzlhMGFkYTctMWMyMTYyOTAtZDBmNGYxN2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:47.388108Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721496. Ctx: { TraceId: 01jzmye7jq8k396ek6jcwc9zne, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTQxNWIxYzEtMzlhMGFkYTctMWMyMTYyOTAtZDBmNGYxN2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:47.388540Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721497. Ctx: { TraceId: 01jzmye7jq8k396ek6jcwc9zne, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTQxNWIxYzEtMzlhMGFkYTctMWMyMTYyOTAtZDBmNGYxN2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:47.389601Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721498. Ctx: { TraceId: 01jzmye7jp6n7vteycxwdzyme1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Mzk1MjZmMTQtMjgyNTI5YTAtMjkwODY1OGEtMTI1MmYwMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:47.389893Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721499. Ctx: { TraceId: 01jzmye7jp6n7vteycxwdzyme1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Mzk1MjZmMTQtMjgyNTI5YTAtMjkwODY1OGEtMTI1MmYwMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:47.390324Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721500. Ctx: { TraceId: 01jzmye7jp6n7vteycxwdzyme1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Mzk1MjZmMTQtMjgyNTI5YTAtMjkwODY1OGEtMTI1MmYwMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:47.394279Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721501. Ctx: { TraceId: 01jzmye7k1b1zvyxkg1xnewdv9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZTVjMDBmNGEtODczZjZkZTUtOGM3ZTRjZGUtNjEzMThiZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:47.395373Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721502. Ctx: { TraceId: 01jzmye7k1b1zvyxkg1xnewdv9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZTVjMDBmNGEtODczZjZkZTUtOGM3ZTRjZGUtNjEzMThiZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:47.395774Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721503. Ctx: { TraceId: 01jzmye7k1b1zvyxkg1xnewdv9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZTVjMDBmNGEtODczZjZkZTUtOGM3ZTRjZGUtNjEzMThiZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T11:58:47.395928Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721504. Ctx: { TraceId: 01jzmye7k2fwcyc63ys6b2mghg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ODkzMDEzMWEtNzMxMTdhYTUtNjI4ZWQ2ZTItYTI3NzA0YTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root finished with status: SUCCESS 2025-07-08T11:58:47.396798Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721505. Ctx: { TraceId: 01jzmye7k22z1za38bche2gj7c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OWJiMjkzNzUtNDcyNGU1NzQtMjQ3NjU5MjEtNmE5ZDM1N2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:47.397221Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721506. Ctx: { TraceId: 01jzmye7k2fwcyc63ys6b2mghg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ODkzMDEzMWEtNzMxMTdhYTUtNjI4ZWQ2ZTItYTI3NzA0YTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:47.397527Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721507. Ctx: { TraceId: 01jzmye7k22z1za38bche2gj7c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OWJiMjkzNzUtNDcyNGU1NzQtMjQ3NjU5MjEtNmE5ZDM1N2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:47.397801Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721508. Ctx: { TraceId: 01jzmye7k2fwcyc63ys6b2mghg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ODkzMDEzMWEtNzMxMTdhYTUtNjI4ZWQ2ZTItYTI3NzA0YTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS 2025-07-08T11:58:47.399863Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721509. Ctx: { TraceId: 01jzmye7k69j4c3nzavtf908gx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTQxNWIxYzEtMzlhMGFkYTctMWMyMTYyOTAtZDBmNGYxN2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:47.399940Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721510. Ctx: { TraceId: 01jzmye7k6dy2xy80rnyqszghy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjcxMzlmNTEtOGFlMDg0ZTYtN2M2NDVmNWMtNTM0NGYyODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:47.400707Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721511. Ctx: { TraceId: 01jzmye7k6dy2xy80rnyqszghy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YjcxMzlmNTEtOGFlMDg0ZTYtN2M2NDVmNWMtNTM0NGYyODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:47.400766Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721512. Ctx: { TraceId: 01jzmye7k69j4c3nzavtf908gx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTQxNWIxYzEtMzlhMGFkYTctMWMyMTYyOTAtZDBmNGYxN2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root finished with status: SUCCESS 2025-07-08T11:58:47.401402Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721513. Ctx: { TraceId: 01jzmye7k69j4c3nzavtf908gx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTQxNWIxYzEtMzlhMGFkYTctMWMyMTYyOTAtZDBmNGYxN2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root finished with status: SUCCESS |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::CreateExternalTableWithReboots |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:58:45.479243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:58:45.479271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:45.479277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:58:45.479283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:58:45.479300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:58:45.479304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:58:45.479319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:45.479332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:58:45.479400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:58:45.493500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:58:45.493526Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:58:45.498035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:58:45.498093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:58:45.498146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:58:45.499823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:58:45.499876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:58:45.499988Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:45.500168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:58:45.501038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:45.501090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:58:45.501339Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:45.501347Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:45.501365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:58:45.501373Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:45.501379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:58:45.501408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:58:45.502728Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:58:45.523209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:58:45.523285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:45.523355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:58:45.523400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:58:45.523410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:45.524278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:45.524306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:58:45.524357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:45.524368Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:58:45.524374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:58:45.524379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:58:45.524756Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:45.524765Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:58:45.524770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:58:45.525239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:45.525271Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:45.525278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:45.525287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:58:45.525754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:58:45.526190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:58:45.526232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:58:45.526460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:45.526487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:58:45.526499Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:45.526582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:58:45.526590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:45.526621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:58:45.526633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:58:45.527897Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:45.527908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:45.527961Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:45.527967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:58:45.527979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:45.527986Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:58:45.527998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:45.528003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:45.528007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:45.528010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:45.528015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:58:45.528020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:45.528025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:58:45.528029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:58:45.528042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:58:45.528048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:58:45.528052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:58:45.528452Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:58:45.528464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
1:58:48.617094Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-07-08T11:58:48.617099Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-07-08T11:58:48.617111Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/4, is published: true 2025-07-08T11:58:48.617115Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-07-08T11:58:48.617296Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-07-08T11:58:48.617302Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 102:2 2025-07-08T11:58:48.617316Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:337:2316] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2025-07-08T11:58:48.617494Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-07-08T11:58:48.617503Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 102:0 2025-07-08T11:58:48.617511Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:343:2320] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2025-07-08T11:58:48.617568Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-07-08T11:58:48.617572Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-07-08T11:58:48.617594Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-07-08T11:58:48.617597Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-07-08T11:58:48.617610Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:125:2151], Recipient [7:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-07-08T11:58:48.617614Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-07-08T11:58:48.617619Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2025-07-08T11:58:48.617624Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:58:48.617684Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-07-08T11:58:48.617707Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-07-08T11:58:48.617711Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 3/4 2025-07-08T11:58:48.617715Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2025-07-08T11:58:48.617719Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 3/4 2025-07-08T11:58:48.617722Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2025-07-08T11:58:48.617726Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/4, is published: true 2025-07-08T11:58:48.617795Z node 7 :FLAT_TX_SCHEMESHARD 
TRACE: StateWork, received event# 2146435072, Sender [7:125:2151], Recipient [7:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-07-08T11:58:48.617800Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-07-08T11:58:48.617805Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-07-08T11:58:48.617809Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:58:48.617834Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-07-08T11:58:48.617846Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-07-08T11:58:48.617849Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 4/4 2025-07-08T11:58:48.617853Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-07-08T11:58:48.617857Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 4/4 2025-07-08T11:58:48.617860Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-07-08T11:58:48.617864Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/4, is published: true 2025-07-08T11:58:48.617874Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:413:2371] message: TxId: 102 2025-07-08T11:58:48.617881Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-07-08T11:58:48.617887Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-07-08T11:58:48.617891Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-07-08T11:58:48.617911Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-07-08T11:58:48.617916Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-07-08T11:58:48.617919Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-07-08T11:58:48.617924Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T11:58:48.617927Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:2 2025-07-08T11:58:48.617931Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:2 2025-07-08T11:58:48.617937Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-07-08T11:58:48.617941Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:3 2025-07-08T11:58:48.617945Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:3 2025-07-08T11:58:48.617951Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-07-08T11:58:48.618018Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435082, Sender [7:125:2151], Recipient [7:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2025-07-08T11:58:48.618023Z node 7 :FLAT_TX_SCHEMESHARD 
TRACE: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2025-07-08T11:58:48.618028Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T11:58:48.618032Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-07-08T11:58:48.618043Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-07-08T11:58:48.618066Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-07-08T11:58:48.618070Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-07-08T11:58:48.618076Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-07-08T11:58:48.618079Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-07-08T11:58:48.618085Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-07-08T11:58:48.618088Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-07-08T11:58:48.618349Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-07-08T11:58:48.618355Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-07-08T11:58:48.618687Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-07-08T11:58:48.618705Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-07-08T11:58:48.618719Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:413:2371] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 102 at schemeshard: 72057594046678944 2025-07-08T11:58:48.618750Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-07-08T11:58:48.618755Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:518:2469] 2025-07-08T11:58:48.618772Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-07-08T11:58:48.618802Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:520:2471], Recipient [7:125:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-07-08T11:58:48.618806Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-07-08T11:58:48.618810Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-07-08T11:58:48.618885Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:598:2548], Recipient [7:125:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-07-08T11:58:48.618890Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-07-08T11:58:48.618901Z node 7 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:58:48.618951Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 46us result status StatusPathDoesNotExist 2025-07-08T11:58:48.618988Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeTable, state: EPathStateNotExist), drop stepId: 5000003, drop txId: 102, source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/Table" PathId: 2 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled [GOOD] Test command err: 2025-07-08T11:58:41.037971Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2025-07-08T11:58:41.038618Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:41.040218Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:41.040273Z node 1 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:41.040787Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2025-07-08T11:58:41.040811Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:41.040874Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2025-07-08T11:58:41.040916Z node 1 :CMS DEBUG: Using default config. 
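The "KQP_EXECUTER ERROR ... Database not set, use /Root" lines that dominate the query traces above are emitted when the query context (Ctx) carries an empty Database field, so the executer falls back to the /Root path already shown as DatabaseId in the same context. The in-process test sessions simply leave the field empty; a regular client avoids the message by naming the database when it connects. The snippet below is only an illustrative sketch using the YDB Python SDK — the endpoint is a placeholder and the exact SDK calls are assumptions to be verified against the installed SDK version, not something taken from this run:

    import ydb

    # Placeholder endpoint; the database is set explicitly so the executer does
    # not have to fall back to /Root the way the test sessions above do.
    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
    driver.wait(timeout=5)

    pool = ydb.SessionPool(driver)

    def select_one(session):
        # Queries issued through this session carry the explicit database path.
        return session.transaction().execute("SELECT 1;", commit_tx=True)

    pool.retry_operation_sync(select_one)
    driver.stop()
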
2025-07-08T11:58:41.041009Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2025-07-08T11:58:41.041027Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:41.042967Z node 1 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:41.043143Z node 1 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:41.043180Z node 1 :CMS DEBUG: Using default config 2025-07-08T11:58:41.043203Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:41.069051Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-07-08T11:58:41.093779Z node 1 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:41.093880Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:41.095370Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:41.095536Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-07-08T11:58:41.095543Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-07-08T11:58:41.095552Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-07-08T11:58:41.095556Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-07-08T11:58:41.095573Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-07-08T11:58:41.095594Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-07-08T11:58:41.095608Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:41.097450Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { } } Success: true 2025-07-08T11:58:41.149370Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:41.149436Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-07-08T11:58:41.149621Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvGetConfigRequest { }, response# NKikimr::NCms::TEvCms::TEvGetConfigResponse { Status { Code: OK } Config { DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } } } 2025-07-08T11:58:41.149690Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:41.182045Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:41.182140Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:41.182279Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120028000 } Timestamp: 120028000 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: 
"1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120028000 } Timestamp: 120028000 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120028000 } Timestamp: 120028000 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120028000 } Timestamp: 120028000 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120028000 } Timestamp: 120028000 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120028000 } Timestamp: 120028000 NodeId: 6 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120028000 } Timestamp: 120028000 NodeId: 7 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120028000 } Timestamp: 120028000 NodeId: 8 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120028000 } } 2025-07-08T11:58:41.228731Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:41.273268Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:41.273352Z node 1 :CMS DEBUG: Updated config: DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 10 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } 2025-07-08T11:58:41.273372Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:41.326331Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:41.326372Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:41.326387Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:41.326440Z node 1 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false TenantPolicy: NONE AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-07-08T11:58:41.326448Z node 1 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 2025-07-08T11:58:41.326461Z node 1 :CMS DEBUG: [Nodes Counter] Checking Node: 1, with state: Up, with limit: 0, with ratio limit: 10, locked nodes: 0, down nodes: 0 2025-07-08T11:58:41.326469Z node 1 :CMS DEBUG: Ring: 0; State: Ok 2025-07-08T11:58:41.326472Z node 1 :CMS DEBUG: Ring: 1; 
State: Ok 2025-07-08T11:58:41.326475Z node 1 :CMS DEBUG: Ring: 2; State: Ok 2025-07-08T11:58:41.326479Z node 1 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:41.326497Z node 1 :CMS DEBUG: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2025-07-08T11:58:41.326504Z node 1 :CMS INFO: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:41.326512Z node 1 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:41.326549Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:03:00.129000Z, action# Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 2025-07-08T11:58:41.337451Z node 1 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:41.337537Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false TenantPolicy: NONE AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } Deadline: 180129000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 1 InterconnectPort: 12001 } } } } 2025-07-08T11:58:41.337548Z node 1 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:05:00.129000Z 2025-07-08T11:58:41.348770Z node 1 :CMS INFO: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:41.348828Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:41.348849Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:41.348862Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:41.348913Z node 1 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false TenantPolicy: NONE AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-07-08T11:58:41.348920Z node 1 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 2025-07-08T11:58:41.348931Z node 1 :CMS DEBUG: [Nodes Counter] Checking Node: 2, with state: Up, with limit: 0, with ratio limit: 10, locked nodes: 1, down nodes: 0 2025-07-08T11:58:41.348944Z node 1 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '2': too many unavailable nodes. Locked: 1, down: 0, total: 8, limit: 10%) 2025-07-08T11:58:41.348971Z node 1 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:41.365495Z node 1 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:41.365587Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false TenantPolicy: NONE AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'2\': too many unavailable nodes. 
Locked: 1, down: 0, total: 8, limit: 10%" } RequestId: "user-r-2" Deadline: 420230512 } 2025-07-08T11:58:41.365785Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvGetConfigRequest { }, response# NKikimr::NCms::TEvCms::TEvGetConfigResponse { Status { Code: OK } Config { DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 10 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } } } 2025-07- ... :CMS DEBUG: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 0, with ratio limit: 20, locked nodes: 1, down nodes: 0 2025-07-08T11:58:47.602730Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-07-08T11:58:47.602734Z node 25 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:47.602782Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: true AvailabilityMode: MODE_FORCE_RESTART }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 } Deadline: 180536048 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 26 InterconnectPort: 12002 } } } Permissions { Action { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 } Deadline: 180536048 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 27 InterconnectPort: 12003 } } } } 2025-07-08T11:58:47.614129Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:47.614222Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:47.614289Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'26\': too many unavailable nodes. Locked: 1, down: 0, total: 8, limit: 20%" } } Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\': too many unavailable nodes. Locked: 1, down: 0, total: 8, limit: 20%" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-07-08T11:58:47.614301Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'26\': too many unavailable nodes. Locked: 1, down: 0, total: 8, limit: 20%" } 2025-07-08T11:58:47.614312Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 20, locked nodes: 0, down nodes: 1 2025-07-08T11:58:47.614325Z node 25 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '26': too many unavailable nodes. 
Locked: 0, down: 1, total: 8, limit: 20%) 2025-07-08T11:58:47.614335Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\': too many unavailable nodes. Locked: 1, down: 0, total: 8, limit: 20%" } 2025-07-08T11:58:47.614343Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 0, with ratio limit: 20, locked nodes: 0, down nodes: 1 2025-07-08T11:58:47.614348Z node 25 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '27': too many unavailable nodes. Locked: 0, down: 1, total: 8, limit: 20%) 2025-07-08T11:58:47.614365Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:47.614404Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'26\': too many unavailable nodes. Locked: 0, down: 1, total: 8, limit: 20%" } } Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\': too many unavailable nodes. Locked: 0, down: 1, total: 8, limit: 20%" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-07-08T11:58:47.625332Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:47.625359Z node 25 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:47.625417Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'26\': too many unavailable nodes. Locked: 0, down: 1, total: 8, limit: 20%" } RequestId: "user-r-1" Deadline: 420636048 } 2025-07-08T11:58:47.636775Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:47.636868Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:47.636935Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'26\': too many unavailable nodes. Locked: 0, down: 1, total: 8, limit: 20%" } } Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\': too many unavailable nodes. Locked: 0, down: 1, total: 8, limit: 20%" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2025-07-08T11:58:47.636965Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'26\': too many unavailable nodes. Locked: 0, down: 1, total: 8, limit: 20%" } 2025-07-08T11:58:47.636977Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 20, locked nodes: 0, down nodes: 1 2025-07-08T11:58:47.636989Z node 25 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '26': too many unavailable nodes. 
Locked: 0, down: 1, total: 8, limit: 20%) 2025-07-08T11:58:47.637000Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\': too many unavailable nodes. Locked: 0, down: 1, total: 8, limit: 20%" } 2025-07-08T11:58:47.637004Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 0, with ratio limit: 20, locked nodes: 0, down nodes: 1 2025-07-08T11:58:47.637009Z node 25 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '27': too many unavailable nodes. Locked: 0, down: 1, total: 8, limit: 20%) 2025-07-08T11:58:47.637027Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:47.637063Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'26\': too many unavailable nodes. Locked: 0, down: 1, total: 8, limit: 20%" } } Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\': too many unavailable nodes. Locked: 0, down: 1, total: 8, limit: 20%" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2025-07-08T11:58:47.650205Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:47.650235Z node 25 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:47.650300Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_KEEP_AVAILABLE }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'26\': too many unavailable nodes. Locked: 0, down: 1, total: 8, limit: 20%" } RequestId: "user-r-1" Deadline: 420737560 } 2025-07-08T11:58:47.661609Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:47.661690Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:47.661740Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'26\': too many unavailable nodes. Locked: 0, down: 1, total: 8, limit: 20%" } } Actions { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\': too many unavailable nodes. Locked: 0, down: 1, total: 8, limit: 20%" } } PartialPermissionAllowed: true Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2025-07-08T11:58:47.661749Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'26\': too many unavailable nodes. 
Locked: 0, down: 1, total: 8, limit: 20%" } 2025-07-08T11:58:47.661757Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 20, locked nodes: 0, down nodes: 1 2025-07-08T11:58:47.661759Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 1 2025-07-08T11:58:47.661763Z node 25 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:47.661772Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 Issue { Type: DISABLED_NODES_LIMIT_REACHED Message: "Cannot lock node \'27\': too many unavailable nodes. Locked: 0, down: 1, total: 8, limit: 20%" } 2025-07-08T11:58:47.661775Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 0, with ratio limit: 20, locked nodes: 1, down nodes: 1 2025-07-08T11:58:47.661777Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 27, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 1 2025-07-08T11:58:47.661779Z node 25 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:47.661790Z node 25 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-1, owner# user 2025-07-08T11:58:47.661795Z node 25 :CMS INFO: Adding lock for Host ::1:12002 (26) (permission user-p-2 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:47.661799Z node 25 :CMS DEBUG: Accepting permission: id# user-p-3, requestId# user-r-1, owner# user 2025-07-08T11:58:47.661802Z node 25 :CMS INFO: Adding lock for Host ::1:12003 (27) (permission user-p-3 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:47.661808Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:47.661827Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:03:00.839072Z, action# Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 2025-07-08T11:58:47.661834Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-3, validity# 1970-01-01T00:03:00.839072Z, action# Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 2025-07-08T11:58:47.661842Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-1, owner# user 2025-07-08T11:58:47.673228Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:47.673259Z node 25 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:47.673340Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_FORCE_RESTART }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "user-p-2" Action { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 } Deadline: 180839072 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 26 InterconnectPort: 12002 } } } Permissions { Id: "user-p-3" Action { Type: SHUTDOWN_HOST Host: "27" Duration: 60000000 } Deadline: 180839072 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 27 InterconnectPort: 12003 } } } } >> TSequence::AlterTableSetDefaultFromSequence [GOOD] >> TTopicApiDescribes::DescribeConsumer >> TTopicApiDescribes::DescribeTopic >> TTopicApiDescribes::GetLocalDescribe |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/describes_ut/unittest |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetPartitionDescribe >> TIcNodeCache::GetNodesInfoTest >> 
TCmsTenatsTest::TestClusterLimitForceRestartMode [GOOD] |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TExternalTableTestReboots::SimpleDropExternalTableWithReboots |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/describes_ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::AlterTableSetDefaultFromSequence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:58:45.654741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:58:45.654767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:45.654773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:58:45.654779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:58:45.654794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:58:45.654798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:58:45.654812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:45.654825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:58:45.654912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:58:45.667954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:58:45.667975Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:58:45.682479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:58:45.682553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:58:45.682609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:58:45.684522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:58:45.684580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:58:45.684800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:45.685031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:58:45.687647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:45.687710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
[RootDataErasureManager] Stop 2025-07-08T11:58:45.687984Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:45.687996Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:45.688016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:58:45.688024Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:45.688030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:58:45.688063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:58:45.689571Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:58:45.710200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:58:45.710287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:45.710362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:58:45.710413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:58:45.710424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:45.713429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:45.713474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:58:45.713542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:45.713555Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:58:45.713561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:58:45.713567Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:58:45.714901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:45.714925Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:58:45.714933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:58:45.715538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:45.715553Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:45.715559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:45.715566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:58:45.716290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:58:45.717432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:58:45.717482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:58:45.717712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:45.717746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:58:45.717758Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:45.717845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:58:45.717854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:45.717889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:58:45.717902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:58:45.718523Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:45.718532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:45.718583Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:45.718589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:58:45.718600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:45.718607Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 
ProgressState 2025-07-08T11:58:45.718618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:45.718622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:45.718627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:45.718630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:45.718634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:58:45.718679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:45.718684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:58:45.718688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:58:45.718701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:58:45.718708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:58:45.718712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:58:45.719197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:58:45.719215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
046678944, LocalPathId: 6] was 4 2025-07-08T11:58:49.527592Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 114, ready parts: 0/1, is published: true 2025-07-08T11:58:49.527596Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 FAKE_COORDINATOR: Erasing txId 114 2025-07-08T11:58:49.527823Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269550080, Sender [7:981:2927], Recipient [7:125:2151]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 1 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 339 } } 2025-07-08T11:58:49.527830Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransactionResult 2025-07-08T11:58:49.527841Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 1 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 339 } } 2025-07-08T11:58:49.527846Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 114, tablet: 72075186233409549, partId: 0 2025-07-08T11:58:49.527865Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 114:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 1 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 339 } } 2025-07-08T11:58:49.527878Z node 7 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 1 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 339 } } 2025-07-08T11:58:49.527882Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-07-08T11:58:49.528200Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:1045:2982], Recipient [7:125:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T11:58:49.528208Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T11:58:49.528214Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-07-08T11:58:49.528254Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [7:981:2927], Recipient [7:125:2151]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 981 RawX2: 30064773999 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-07-08T11:58:49.528259Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-07-08T11:58:49.528268Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 981 RawX2: 30064773999 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-07-08T11:58:49.528272Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation FindRelatedPartByTabletId, TxId: 114, tablet: 72075186233409549, partId: 0 2025-07-08T11:58:49.528287Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 114:0, at schemeshard: 72057594046678944, message: Source { RawX1: 981 RawX2: 30064773999 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-07-08T11:58:49.528293Z node 7 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-07-08T11:58:49.528301Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 981 RawX2: 30064773999 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-07-08T11:58:49.528312Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 114:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:49.528317Z node 7 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 114:0, at schemeshard: 72057594046678944 2025-07-08T11:58:49.528322Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 114:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-07-08T11:58:49.528328Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 114:0 129 -> 240 2025-07-08T11:58:49.528349Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-07-08T11:58:49.528408Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-07-08T11:58:49.528942Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2025-07-08T11:58:49.528968Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-07-08T11:58:49.529146Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2025-07-08T11:58:49.529160Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-07-08T11:58:49.529239Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2025-07-08T11:58:49.529243Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-07-08T11:58:49.529330Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2025-07-08T11:58:49.529335Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-07-08T11:58:49.529339Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 114:0 2025-07-08T11:58:49.529358Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:981:2927] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 114 at schemeshard: 72057594046678944 2025-07-08T11:58:49.529415Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:125:2151], Recipient [7:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-07-08T11:58:49.529420Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-07-08T11:58:49.529427Z node 7 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 114:0, at schemeshard: 72057594046678944 2025-07-08T11:58:49.529433Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 114:0 ProgressState 2025-07-08T11:58:49.529443Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-07-08T11:58:49.529447Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#114:0 progress is 1/1 2025-07-08T11:58:49.529451Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2025-07-08T11:58:49.529456Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#114:0 progress is 1/1 2025-07-08T11:58:49.529481Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2025-07-08T11:58:49.529486Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 114, ready parts: 1/1, is published: true 2025-07-08T11:58:49.529496Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:389:2357] message: TxId: 114 2025-07-08T11:58:49.529501Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2025-07-08T11:58:49.529507Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 114:0 2025-07-08T11:58:49.529511Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 114:0 2025-07-08T11:58:49.529536Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-07-08T11:58:49.529972Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-07-08T11:58:49.530005Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:389:2357] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 114 at schemeshard: 72057594046678944 2025-07-08T11:58:49.530042Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2025-07-08T11:58:49.530048Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [7:1010:2948] 2025-07-08T11:58:49.530091Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:1012:2950], Recipient [7:125:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-07-08T11:58:49.530096Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-07-08T11:58:49.530100Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 114 TestModificationResults wait txId: 115 2025-07-08T11:58:49.530306Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [7:1054:2991], Recipient [7:125:2151]: {TEvModifySchemeTransaction txid# 115 TabletId# 72057594046678944} 2025-07-08T11:58:49.530311Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-07-08T11:58:49.530917Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table3" Columns { Name: "value" DefaultFromSequence: "/MyRoot/seq1" } } } TxId: 115 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:58:49.530986Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/Table3, pathId: , opId: 115:0, at 
schemeshard: 72057594046678944 2025-07-08T11:58:49.531067Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 115:1, propose status:StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, at schemeshard: 72057594046678944 2025-07-08T11:58:49.531122Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-07-08T11:58:49.531599Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 115, response: Status: StatusInvalidParameter Reason: "Column \'value\' is of type Bool but default expression is of type Int64" TxId: 115 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:58:49.531632Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 115, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, operation: ALTER TABLE, path: /MyRoot/Table3 2025-07-08T11:58:49.531637Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 115, wait until txId: 115 |64.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> BasicUsage::ReadWithoutConsumerWithRestarts [GOOD] >> BasicUsage::MaxByteSizeEqualZero |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/describes_ut/unittest |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestClusterLimitForceRestartMode [GOOD] Test command err: 2025-07-08T11:58:38.735128Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2025-07-08T11:58:38.735885Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:38.737316Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:38.737376Z node 1 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:38.737790Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2025-07-08T11:58:38.738613Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:38.738674Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2025-07-08T11:58:38.738718Z node 1 :CMS DEBUG: Using default config. 
2025-07-08T11:58:38.738784Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2025-07-08T11:58:38.738797Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:38.742003Z node 1 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:38.742042Z node 1 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:38.742060Z node 1 :CMS DEBUG: Using default config 2025-07-08T11:58:38.742077Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:38.761184Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-07-08T11:58:38.782360Z node 1 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:38.782434Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:38.783739Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:38.783833Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-07-08T11:58:38.783837Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-07-08T11:58:38.783842Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-07-08T11:58:38.783844Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-07-08T11:58:38.783859Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-07-08T11:58:38.783874Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-07-08T11:58:38.783893Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:38.784929Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { } } Success: true 2025-07-08T11:58:38.827843Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:38.827898Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-07-08T11:58:38.828050Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvGetConfigRequest { }, response# NKikimr::NCms::TEvCms::TEvGetConfigResponse { Status { Code: OK } Config { DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } } } 2025-07-08T11:58:38.828102Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:38.864173Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:38.864271Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:38.864407Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120028000 } Timestamp: 120028000 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: 
"1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120028000 } Timestamp: 120028000 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120028000 } Timestamp: 120028000 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120028000 } Timestamp: 120028000 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120028000 } Timestamp: 120028000 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120028000 } Timestamp: 120028000 NodeId: 6 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120028000 } Timestamp: 120028000 NodeId: 7 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120028000 } Timestamp: 120028000 NodeId: 8 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120028000 } } 2025-07-08T11:58:38.905337Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:38.946399Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:38.946468Z node 1 :CMS DEBUG: Updated config: DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesLimit: 2 DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } 2025-07-08T11:58:38.946490Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:38.993683Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:38.993735Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:38.993752Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:38.993816Z node 1 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false TenantPolicy: NONE AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-07-08T11:58:38.993824Z node 1 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 2025-07-08T11:58:38.993837Z node 1 :CMS DEBUG: [Nodes Counter] Checking Node: 1, with state: Up, with limit: 2, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-07-08T11:58:38.993847Z node 1 :CMS DEBUG: Ring: 0; State: Ok 2025-07-08T11:58:38.993850Z node 1 :CMS DEBUG: Ring: 1; 
State: Ok 2025-07-08T11:58:38.993853Z node 1 :CMS DEBUG: Ring: 2; State: Ok 2025-07-08T11:58:38.993857Z node 1 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:38.993874Z node 1 :CMS DEBUG: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2025-07-08T11:58:38.993882Z node 1 :CMS INFO: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:38.993892Z node 1 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:38.993935Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:03:00.129000Z, action# Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 2025-07-08T11:58:39.008758Z node 1 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:39.008886Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false TenantPolicy: NONE AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } Deadline: 180129000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 1 InterconnectPort: 12001 } } } } 2025-07-08T11:58:39.008900Z node 1 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:05:00.129000Z 2025-07-08T11:58:39.021913Z node 1 :CMS INFO: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:39.021968Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:39.021986Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:39.022100Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:39.022150Z node 1 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false TenantPolicy: NONE AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-07-08T11:58:39.022158Z node 1 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 2025-07-08T11:58:39.022167Z node 1 :CMS DEBUG: [Nodes Counter] Checking Node: 2, with state: Up, with limit: 2, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-07-08T11:58:39.022173Z node 1 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:39.022191Z node 1 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user 2025-07-08T11:58:39.022197Z node 1 :CMS INFO: Adding lock for Host ::1:12002 (2) (permission user-p-2 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:39.022206Z node 1 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:39.022242Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:03:00.230512Z, action# Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 2025-07-08T11:58:39.036503Z node 1 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:39.036619Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false TenantPolicy: NONE AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-2" Permissions { Id: "user-p-2" Action { 
Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 } Deadline: 180230512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 2 InterconnectPort: 12002 } } } } 2025-07-08T11:58:39.054109Z node 1 :CMS INFO: Adding lock for Host ::1:12002 (2) (permission user-p-2 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:39.054141Z node 1 :CMS INFO: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:39.054187Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:39.054209Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:39.054223Z node ... bilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: SHUTDOWN_HOST Host: "41" Duration: 60000000 } Deadline: 180130000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 41 InterconnectPort: 12001 } } } } 2025-07-08T11:58:48.531844Z node 41 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:05:00.130000Z 2025-07-08T11:58:48.543434Z node 41 :CMS INFO: Adding lock for Host ::1:12001 (41) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:48.543505Z node 41 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:48.543530Z node 41 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:48.543546Z node 41 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:48.543607Z node 41 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-07-08T11:58:48.543616Z node 41 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 2025-07-08T11:58:48.543628Z node 41 :CMS DEBUG: [Nodes Counter] Checking Node: 42, with state: Up, with limit: 1, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-07-08T11:58:48.543639Z node 41 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '42': too many unavailable nodes. Locked: 1, down: 0, limit: 1) 2025-07-08T11:58:48.543651Z node 41 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:48.565351Z node 41 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:48.565468Z node 41 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'42\': too many unavailable nodes. 
Locked: 1, down: 0, limit: 1" } RequestId: "user-r-2" Deadline: 420231512 } 2025-07-08T11:58:48.578176Z node 41 :CMS INFO: Adding lock for Host ::1:12001 (41) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:48.578252Z node 41 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:48.578279Z node 41 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:48.578295Z node 41 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:48.578355Z node 41 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2025-07-08T11:58:48.578365Z node 41 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 2025-07-08T11:58:48.578378Z node 41 :CMS DEBUG: [Nodes Counter] Checking Node: 42, with state: Up, with limit: 1, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-07-08T11:58:48.578389Z node 41 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '42': too many unavailable nodes. Locked: 1, down: 0, limit: 1) 2025-07-08T11:58:48.578403Z node 41 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:48.589825Z node 41 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:48.589924Z node 41 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'42\': too many unavailable nodes. Locked: 1, down: 0, limit: 1" } RequestId: "user-r-3" Deadline: 420333024 } 2025-07-08T11:58:48.602461Z node 41 :CMS INFO: Adding lock for Host ::1:12001 (41) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:48.602533Z node 41 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:48.602557Z node 41 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:48.602572Z node 41 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:48.602631Z node 41 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2025-07-08T11:58:48.602644Z node 41 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 2025-07-08T11:58:48.602657Z node 41 :CMS DEBUG: [Nodes Counter] Checking Node: 42, with state: Up, with limit: 1, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-07-08T11:58:48.602661Z node 41 :CMS DEBUG: [Nodes Counter] Checking Node: 42, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-07-08T11:58:48.602667Z node 41 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:48.602718Z node 41 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: true AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Action { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } Deadline: 180434536 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 42 
InterconnectPort: 12002 } } } } 2025-07-08T11:58:48.602823Z node 41 :CMS INFO: User user is done with permissions user-p-1 2025-07-08T11:58:48.602831Z node 41 :CMS DEBUG: Resulting status: OK 2025-07-08T11:58:48.602838Z node 41 :CMS DEBUG: TTxRemovePermissions Execute 2025-07-08T11:58:48.602866Z node 41 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove 2025-07-08T11:58:48.621283Z node 41 :CMS DEBUG: TTxRemovePermissions Complete 2025-07-08T11:58:48.621374Z node 41 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2025-07-08T11:58:48.632668Z node 41 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:48.632744Z node 41 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:48.632791Z node 41 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-07-08T11:58:48.632800Z node 41 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 2025-07-08T11:58:48.632810Z node 41 :CMS DEBUG: [Nodes Counter] Checking Node: 42, with state: Up, with limit: 1, with ratio limit: 0, locked nodes: 0, down nodes: 1 2025-07-08T11:58:48.632819Z node 41 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '42': too many unavailable nodes. Locked: 0, down: 1, limit: 1) 2025-07-08T11:58:48.632831Z node 41 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:48.645013Z node 41 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:48.645037Z node 41 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:48.645090Z node 41 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'42\': too many unavailable nodes. Locked: 0, down: 1, limit: 1" } RequestId: "user-r-4" Deadline: 420536048 } 2025-07-08T11:58:48.656280Z node 41 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:48.656357Z node 41 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:48.656403Z node 41 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false 2025-07-08T11:58:48.656412Z node 41 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 2025-07-08T11:58:48.656422Z node 41 :CMS DEBUG: [Nodes Counter] Checking Node: 42, with state: Up, with limit: 1, with ratio limit: 0, locked nodes: 0, down nodes: 1 2025-07-08T11:58:48.656431Z node 41 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '42': too many unavailable nodes. 
Locked: 0, down: 1, limit: 1) 2025-07-08T11:58:48.656441Z node 41 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:48.667384Z node 41 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:48.667412Z node 41 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:48.667470Z node 41 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_KEEP_AVAILABLE EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'42\': too many unavailable nodes. Locked: 0, down: 1, limit: 1" } RequestId: "user-r-5" Deadline: 420637560 } 2025-07-08T11:58:48.678680Z node 41 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:48.678750Z node 41 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:48.678796Z node 41 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false 2025-07-08T11:58:48.678805Z node 41 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 2025-07-08T11:58:48.678815Z node 41 :CMS DEBUG: [Nodes Counter] Checking Node: 42, with state: Up, with limit: 1, with ratio limit: 0, locked nodes: 0, down nodes: 1 2025-07-08T11:58:48.678820Z node 41 :CMS DEBUG: [Nodes Counter] Checking Node: 42, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 1 2025-07-08T11:58:48.678826Z node 41 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:48.678840Z node 41 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-6, owner# user 2025-07-08T11:58:48.678847Z node 41 :CMS INFO: Adding lock for Host ::1:12002 (42) (permission user-p-2 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:48.678856Z node 41 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:48.678879Z node 41 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:03:00.739072Z, action# Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 2025-07-08T11:58:48.691529Z node 41 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:48.691552Z node 41 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:48.691613Z node 41 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_FORCE_RESTART EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-6" Permissions { Id: "user-p-2" Action { Type: SHUTDOWN_HOST Host: "42" Duration: 60000000 } Deadline: 180739072 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 42 InterconnectPort: 12002 } } } } |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> YdbIndexTable::MultiShardTableTwoIndexes [GOOD] |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest 
|64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> test_stop.py::TestStop::test_stop_query[v1-streaming] [GOOD] |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |64.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part14/pytest >> test.py::test[join-join_no_correlation_in_order_by-off-Results] [SKIPPED] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableTwoIndexes [GOOD] Test command err: Trying to start YDB, gRPC: 12444, MsgBus: 1399 2025-07-08T11:58:17.678842Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679181585212022:2240];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00193d/r3tmp/tmpSwgYao/pdisk_1.dat 2025-07-08T11:58:17.728670Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T11:58:17.757759Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12444, node 1 2025-07-08T11:58:17.775789Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:58:17.775801Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:58:17.775803Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:58:17.775836Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1399 2025-07-08T11:58:17.827824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:17.827851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:17.828820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1399 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:58:17.858791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:17.861640Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:58:17.869446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:17.896964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:17.929918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:17.992552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:58:18.102043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:58:18.109618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:58:18.124267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T11:58:18.135966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T11:58:18.150022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T11:58:18.163663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T11:58:18.177657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T11:58:18.345839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T11:58:18.521420Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jzmydbcq73f76wtryrf268kg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODkwNzQyYTctODEwMzZiNDItMWM0NWQxODQtMTdlMTk4MzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.521420Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jzmydbcnbtvm4y57rc2xy99s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjZlMDQ0NjEtYTRhZTEwYjYtYTdlMTYwYi02ZmQyYTJmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.522164Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jzmydbcq3ceqhvhy1ws4wbx3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzI3OWUyYjAtM2ZlNTFiODctYTQyM2JhY2QtODE0YmJiMTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.524415Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jzmydbcr2vyw9wdwjwr735fm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmE1YWIwYWQtZGIxY2MyNzctNzA3NjBlZDMtOTFlMmY0NGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.524585Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jzmydbcr9mpzzzrq16ey5jq6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmViZDQwOTktYzAzOTY2YzUtNWI3MTZmNTMtZGQ3OTA3MmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.524862Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. 
Ctx: { TraceId: 01jzmydbcrfntchvfc2nrzmjgr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDg3NWVhNzktOGM1ZjU5ODQtNjA0ZGIyYjgtYjIxOTc5NjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.525022Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jzmydbcr7rwxgkqjbz0qh9m6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjZmNDU1My04MmZlNzQ5OS05ODYyYzNmYy1mMGJhYzk1OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.525115Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jzmydbcr0xkytkg50n1bptsb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTgxOWE1ZS03ZTZiZWU5NC0xOGY2ZjBlZi1jYTJhZjBjMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.525217Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jzmydbcr07myt5cmzkqbh2hq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmMxMWUxZDItMzAwYzNiNWItMWU1ZDkxNjYtYmZhYjRiODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.526414Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jzmydbcrf09x9rk5ndnc55ga, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmE1NWQ5MDctZGY3MzNmOWItOGE3MTU2ZTYtNTBiN2IyMTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.526685Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jzmydbcq73f76wtryrf268kg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODkwNzQyYTctODEwMzZiNDItMWM0NWQxODQtMTdlMTk4MzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.526815Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jzmydbcnbtvm4y57rc2xy99s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjZlMDQ0NjEtYTRhZTEwYjYtYTdlMTYwYi02ZmQyYTJmMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.527309Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jzmydbcq3ceqhvhy1ws4wbx3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzI3OWUyYjAtM2ZlNTFiODctYTQyM2JhY2QtODE0YmJiMTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.529643Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jzmydbcr9mpzzzrq16ey5jq6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmViZDQwOTktYzAzOTY2YzUtNWI3MTZmNTMtZGQ3OTA3MmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.531245Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715688. Ctx: { TraceId: 01jzmydbcr0xkytkg50n1bptsb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTgxOWE1ZS03ZTZiZWU5NC0xOGY2ZjBlZi1jYTJhZjBjMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.531266Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715689. Ctx: { TraceId: 01jzmydbcr07myt5cmzkqbh2hq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmMxMWUxZDItMzAwYzNiNWItMWU1ZDkxNjYtYmZhYjRiODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T11:58:18.531355Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715690. Ctx: { TraceId: 01jzmydbcq3ceqhvhy1ws4wbx3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzI3OWUyYjAtM2ZlNTFiODctYTQyM2JhY2QtODE0YmJiMTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.531421Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jzmydbcrfntchvfc2nrzmjgr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDg3NWVhNzktOGM1ZjU5ODQtNjA0ZGIyYjgtYjIxOTc5NjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:18.531509Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715691. Ctx: { TraceId: 01jzmydbcq73f76wtryrf268kg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODkwNzQyYTctODEwMzZiNDItMWM0N ... ntExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.043173Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721609. Ctx: { TraceId: 01jzmyea5mb6c4sq7sm55cd0ct, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzc3ZDg1MjMtM2JmZDcwYTEtZTEyZmYzMjAtYzgzMzk2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.043453Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721610. Ctx: { TraceId: 01jzmyea5m3q5dfp84baj6csax, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTFlYTkwNDItMWRlNmMwZTQtZGY0NTFhNDEtZjI5NjRhNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.043596Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721611. Ctx: { TraceId: 01jzmyea5m9w074qaem9chqen6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTBlYWE3Y2QtYzcyMmYyMGQtZDNmNjMwODUtNDhhZjY1Yjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.049634Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721612. Ctx: { TraceId: 01jzmyea5y1tkrrrv6fy4ab058, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjlkNzU0NzItZTdlMmY0ZmUtY2FkZmI5N2QtNTJjYTQ4N2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.052475Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721613. Ctx: { TraceId: 01jzmyea5y1tkrrrv6fy4ab058, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjlkNzU0NzItZTdlMmY0ZmUtY2FkZmI5N2QtNTJjYTQ4N2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.054020Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721614. Ctx: { TraceId: 01jzmyea5y1tkrrrv6fy4ab058, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjlkNzU0NzItZTdlMmY0ZmUtY2FkZmI5N2QtNTJjYTQ4N2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.063278Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721616. Ctx: { TraceId: 01jzmyea64d2yh3swf3wbc7dpn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDlhZWIwZGEtOGRhMDM0OWMtNjAyYWE0Ni1lZWNiMzViMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.063367Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721617. 
Ctx: { TraceId: 01jzmyea656f3sf8ds72xq4yab, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzc3ZDg1MjMtM2JmZDcwYTEtZTEyZmYzMjAtYzgzMzk2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.063704Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721618. Ctx: { TraceId: 01jzmyea64e8bpezzkmyef7zf2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWIwODAwNzQtZjQ0N2MwOTgtY2JiNjI0NTQtZGU4MDFjOWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.063976Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721615. Ctx: { TraceId: 01jzmyea62ac2wszhqbatg8gtk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWZkOWQxN2EtYzgwMDcwNTQtZWUxYzZkMDktYzYyMGJhNTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.065416Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721620. Ctx: { TraceId: 01jzmyea64d2yh3swf3wbc7dpn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDlhZWIwZGEtOGRhMDM0OWMtNjAyYWE0Ni1lZWNiMzViMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.065496Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721619. Ctx: { TraceId: 01jzmyea656f3sf8ds72xq4yab, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzc3ZDg1MjMtM2JmZDcwYTEtZTEyZmYzMjAtYzgzMzk2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.065696Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721621. Ctx: { TraceId: 01jzmyea64e8bpezzkmyef7zf2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWIwODAwNzQtZjQ0N2MwOTgtY2JiNjI0NTQtZGU4MDFjOWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.065825Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721622. Ctx: { TraceId: 01jzmyea62ac2wszhqbatg8gtk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWZkOWQxN2EtYzgwMDcwNTQtZWUxYzZkMDktYzYyMGJhNTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.066500Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721623. Ctx: { TraceId: 01jzmyea64d2yh3swf3wbc7dpn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDlhZWIwZGEtOGRhMDM0OWMtNjAyYWE0Ni1lZWNiMzViMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.066550Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721624. Ctx: { TraceId: 01jzmyea656f3sf8ds72xq4yab, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzc3ZDg1MjMtM2JmZDcwYTEtZTEyZmYzMjAtYzgzMzk2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.066918Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721625. Ctx: { TraceId: 01jzmyea64e8bpezzkmyef7zf2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWIwODAwNzQtZjQ0N2MwOTgtY2JiNjI0NTQtZGU4MDFjOWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.067749Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721626. Ctx: { TraceId: 01jzmyea64d2yh3swf3wbc7dpn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDlhZWIwZGEtOGRhMDM0OWMtNjAyYWE0Ni1lZWNiMzViMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T11:58:50.067958Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721627. Ctx: { TraceId: 01jzmyea64e8bpezzkmyef7zf2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWIwODAwNzQtZjQ0N2MwOTgtY2JiNjI0NTQtZGU4MDFjOWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.068005Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721628. Ctx: { TraceId: 01jzmyea656f3sf8ds72xq4yab, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzc3ZDg1MjMtM2JmZDcwYTEtZTEyZmYzMjAtYzgzMzk2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.071607Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721629. Ctx: { TraceId: 01jzmyea6p974chqq5czgwr02g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTBlYWE3Y2QtYzcyMmYyMGQtZDNmNjMwODUtNDhhZjY1Yjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.072721Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721630. Ctx: { TraceId: 01jzmyea6p974chqq5czgwr02g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTBlYWE3Y2QtYzcyMmYyMGQtZDNmNjMwODUtNDhhZjY1Yjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.078281Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721632. Ctx: { TraceId: 01jzmyea6v1s28maw9x04gk51z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWZkOWQxN2EtYzgwMDcwNTQtZWUxYzZkMDktYzYyMGJhNTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.078655Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721633. Ctx: { TraceId: 01jzmyea6w03qnp7nv29j994k7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWIwODAwNzQtZjQ0N2MwOTgtY2JiNjI0NTQtZGU4MDFjOWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.079030Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721631. Ctx: { TraceId: 01jzmyea6v1ctspf0r002we0d7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjlkNzU0NzItZTdlMmY0ZmUtY2FkZmI5N2QtNTJjYTQ4N2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.079344Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721634. Ctx: { TraceId: 01jzmyea6wf2ne4wapyhr1zas4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDlhZWIwZGEtOGRhMDM0OWMtNjAyYWE0Ni1lZWNiMzViMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.081757Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721635. Ctx: { TraceId: 01jzmyea6v1s28maw9x04gk51z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWZkOWQxN2EtYzgwMDcwNTQtZWUxYzZkMDktYzYyMGJhNTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.081800Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721637. Ctx: { TraceId: 01jzmyea6wf2ne4wapyhr1zas4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDlhZWIwZGEtOGRhMDM0OWMtNjAyYWE0Ni1lZWNiMzViMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.081902Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721638. 
Ctx: { TraceId: 01jzmyea6v1ctspf0r002we0d7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjlkNzU0NzItZTdlMmY0ZmUtY2FkZmI5N2QtNTJjYTQ4N2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.081941Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721639. Ctx: { TraceId: 01jzmyea6x9pvxxes1drskagbm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzc3ZDg1MjMtM2JmZDcwYTEtZTEyZmYzMjAtYzgzMzk2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.082143Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721636. Ctx: { TraceId: 01jzmyea6w03qnp7nv29j994k7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWIwODAwNzQtZjQ0N2MwOTgtY2JiNjI0NTQtZGU4MDFjOWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.083415Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721640. Ctx: { TraceId: 01jzmyea6v1s28maw9x04gk51z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWZkOWQxN2EtYzgwMDcwNTQtZWUxYzZkMDktYzYyMGJhNTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.083735Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721641. Ctx: { TraceId: 01jzmyea6x9pvxxes1drskagbm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzc3ZDg1MjMtM2JmZDcwYTEtZTEyZmYzMjAtYzgzMzk2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.083954Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721642. Ctx: { TraceId: 01jzmyea6w03qnp7nv29j994k7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWIwODAwNzQtZjQ0N2MwOTgtY2JiNjI0NTQtZGU4MDFjOWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root finished with status: SUCCESS 2025-07-08T11:58:50.084556Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721643. Ctx: { TraceId: 01jzmyea6v1s28maw9x04gk51z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWZkOWQxN2EtYzgwMDcwNTQtZWUxYzZkMDktYzYyMGJhNTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:50.085241Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721644. Ctx: { TraceId: 01jzmyea6x9pvxxes1drskagbm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Mzc3ZDg1MjMtM2JmZDcwYTEtZTEyZmYzMjAtYzgzMzk2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::DropExternalTableWithReboots >> test.py::test[aggregate-group_by_rollup_duo--Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join--Results] |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TCmsTest::ActionIssue [GOOD] >> TKesusTest::TestAcquireLocks [GOOD] >> TKesusTest::TestAcquireRepeat |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::CreateDroppedExternalTableAndDropWithReboots |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |64.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TKesusTest::TestAcquireRepeat [GOOD] >> TKesusTest::TestAcquireDowngrade |64.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |64.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |64.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/cms/ut/unittest >> TCmsTest::ActionIssue [GOOD] Test command err: 2025-07-08T11:58:42.363189Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Execute 2025-07-08T11:58:42.364115Z node 1 :CMS DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:58:42.365250Z node 1 :CMS DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:58:42.365304Z node 1 :CMS DEBUG: TTxInitScheme Execute 2025-07-08T11:58:42.365639Z node 1 :CMS DEBUG: TConsole::TTxInitScheme Complete 2025-07-08T11:58:42.366426Z node 1 :CMS DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:58:42.366480Z node 1 :CMS DEBUG: TConsole::TTxLoadState Execute 2025-07-08T11:58:42.366525Z node 1 :CMS DEBUG: Using default config. 
2025-07-08T11:58:42.366594Z node 1 :CMS DEBUG: TConsole::TTxLoadState Complete 2025-07-08T11:58:42.366608Z node 1 :CMS DEBUG: StateInit event type: 104d0001 event: NKikimr::NConsole::TEvConfigsDispatcher::TEvSetConfigSubscriptionResponse 2025-07-08T11:58:42.367765Z node 1 :CMS DEBUG: TTxInitScheme Complete 2025-07-08T11:58:42.367801Z node 1 :CMS DEBUG: TTxLoadState Execute 2025-07-08T11:58:42.367823Z node 1 :CMS DEBUG: Using default config 2025-07-08T11:58:42.367848Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:42.389558Z node 1 :CMS DEBUG: StateInit event type: 104a0012 event: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { FeatureFlags { EnableCMSRequestPriorities: true EnableSingleCompositeActionGroup: true } } ItemKinds: 25 ItemKinds: 26 Local: true } 2025-07-08T11:58:42.411018Z node 1 :CMS DEBUG: TTxLoadState Complete 2025-07-08T11:58:42.411092Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:42.412470Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:42.412573Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-07-08T11:58:42.412579Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-07-08T11:58:42.412587Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-07-08T11:58:42.412591Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-07-08T11:58:42.412618Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-07-08T11:58:42.412638Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-07-08T11:58:42.412661Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:42.414291Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { } } Success: true 2025-07-08T11:58:42.461851Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:42.461902Z node 1 :CMS DEBUG: Updated config: TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } SentinelConfig { Enable: false } 2025-07-08T11:58:42.462054Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvGetConfigRequest { }, response# NKikimr::NCms::TEvCms::TEvGetConfigResponse { Status { Code: OK } Config { DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } } } 2025-07-08T11:58:42.462106Z node 1 :CMS DEBUG: TTxUpdateConfig Execute 2025-07-08T11:58:42.497810Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:42.497891Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:42.498025Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120028000 } Timestamp: 120028000 NodeId: 1 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: 
"1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120028000 } Timestamp: 120028000 NodeId: 2 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120028000 } Timestamp: 120028000 NodeId: 3 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120028000 } Timestamp: 120028000 NodeId: 4 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120028000 } Timestamp: 120028000 NodeId: 5 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120028000 } Timestamp: 120028000 NodeId: 6 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120028000 } Timestamp: 120028000 NodeId: 7 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "dynnode" State: UP Version: "-1" Timestamp: 120028000 } Timestamp: 120028000 NodeId: 8 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120028000 } } 2025-07-08T11:58:42.538629Z node 1 :CMS DEBUG: Running CleanupWalleTasks 2025-07-08T11:58:42.586338Z node 1 :CMS DEBUG: TTxUpdateConfig Complete 2025-07-08T11:58:42.586409Z node 1 :CMS DEBUG: Updated config: DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 10 } ClusterLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } 2025-07-08T11:58:42.586430Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:42.635017Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:42.635056Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:42.635070Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:42.635119Z node 1 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-07-08T11:58:42.635127Z node 1 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 2025-07-08T11:58:42.635138Z node 1 :CMS DEBUG: [Nodes Counter] Checking Node: 1, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-07-08T11:58:42.635142Z node 1 :CMS DEBUG: [Nodes Counter] Checking Node: 1, with state: Up, with limit: 0, with ratio limit: 10, locked 
nodes: 0, down nodes: 0 2025-07-08T11:58:42.635150Z node 1 :CMS DEBUG: Ring: 0; State: Ok 2025-07-08T11:58:42.635154Z node 1 :CMS DEBUG: Ring: 1; State: Ok 2025-07-08T11:58:42.635156Z node 1 :CMS DEBUG: Ring: 2; State: Ok 2025-07-08T11:58:42.635160Z node 1 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:42.635175Z node 1 :CMS DEBUG: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2025-07-08T11:58:42.635183Z node 1 :CMS INFO: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:42.635191Z node 1 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:42.635224Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:03:00.129000Z, action# Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 2025-07-08T11:58:42.648914Z node 1 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:42.649012Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: SHUTDOWN_HOST Host: "1" Duration: 60000000 } Deadline: 180129000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 1 InterconnectPort: 12001 } } } } 2025-07-08T11:58:42.649026Z node 1 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:05:00.129000Z 2025-07-08T11:58:42.660772Z node 1 :CMS INFO: Adding lock for Host ::1:12001 (1) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:42.660826Z node 1 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:42.660843Z node 1 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:42.660856Z node 1 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:42.660901Z node 1 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-07-08T11:58:42.660909Z node 1 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 2025-07-08T11:58:42.660920Z node 1 :CMS DEBUG: [Nodes Counter] Checking Node: 2, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-07-08T11:58:42.660924Z node 1 :CMS DEBUG: [Nodes Counter] Checking Node: 2, with state: Up, with limit: 0, with ratio limit: 10, locked nodes: 1, down nodes: 0 2025-07-08T11:58:42.660935Z node 1 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Cannot lock node '2' of tenant 'user0': too many unavailable nodes. Locked: 1, down: 0, total: 8, limit: 10%) 2025-07-08T11:58:42.660964Z node 1 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:42.672089Z node 1 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:42.672165Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "2" Duration: 60000000 } PartialPermissionAllowed: false Schedule: false DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Cannot lock node \'2\' of tenant \'user0\': too many unavailable nodes. 
Locked: 1, down: 0, total: 8, limit: 10%" } RequestId: "user-r-2" Deadline: 420230512 } 2025-07-08T11:58:42.672322Z node 1 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvGetConfigRequest { }, response# NKikimr::NCms::TEvCms::TEvGetConfigResponse { Status { Code: OK } Config { DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 10 } ClusterLimits { DisabledNodesLimit: 0 DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: false UpdateConfigInterval: 3 ... ration: 60000000 } Actions { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-07-08T11:58:48.861946Z node 25 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:48.862029Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 } Actions { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 } PartialPermissionAllowed: false Schedule: true DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } RequestId: "user-r-2" Deadline: 420129024 } 2025-07-08T11:58:48.862168Z node 25 :CMS INFO: Get selected requests for user 2025-07-08T11:58:48.862183Z node 25 :CMS DEBUG: Resulting status: OK 2025-07-08T11:58:48.862220Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManageRequestRequest { User: "user" Command: GET RequestId: "user-r-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManageRequestResponse { Status { Code: OK } Requests { RequestId: "user-r-2" Owner: "user" Actions { Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 } Actions { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. 
Down: " } } PartialPermissionAllowed: false Reason: "" AvailabilityMode: MODE_MAX_AVAILABILITY Priority: 0 } } 2025-07-08T11:58:48.874197Z node 25 :CMS INFO: Adding lock for Host ::1:12001 (25) (permission user-p-1 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:48.874288Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:48.874303Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:48.874313Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:48.874451Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 } Actions { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-07-08T11:58:48.874456Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 2025-07-08T11:58:48.874463Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 34, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-07-08T11:58:48.874486Z node 25 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:48.874495Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } 2025-07-08T11:58:48.874498Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 2, down nodes: 0 2025-07-08T11:58:48.874510Z node 25 :CMS DEBUG: Result: DISALLOW_TEMP (reason: Issue in affected group with id '0': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: ) 2025-07-08T11:58:48.874527Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:48.874558Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-2, owner# user, order# 2, priority# 0, body# User: "user" Actions { Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 } Actions { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-07-08T11:58:48.885618Z node 25 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:48.885711Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-2" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "Issue in affected group with id \'0\': too many unavailable vdisks. 
Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } RequestId: "user-r-2" Deadline: 420230536 } 2025-07-08T11:58:48.885878Z node 25 :CMS INFO: Get selected requests for user 2025-07-08T11:58:48.885894Z node 25 :CMS DEBUG: Resulting status: OK 2025-07-08T11:58:48.885932Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManageRequestRequest { User: "user" Command: GET RequestId: "user-r-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManageRequestResponse { Status { Code: OK } Requests { RequestId: "user-r-2" Owner: "user" Actions { Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 } Actions { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } } PartialPermissionAllowed: false Reason: "" AvailabilityMode: MODE_MAX_AVAILABILITY Priority: 0 } } 2025-07-08T11:58:48.886002Z node 25 :CMS INFO: User user is done with permissions user-p-1 2025-07-08T11:58:48.886008Z node 25 :CMS DEBUG: Resulting status: OK 2025-07-08T11:58:48.886024Z node 25 :CMS DEBUG: TTxRemovePermissions Execute 2025-07-08T11:58:48.886051Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove 2025-07-08T11:58:48.900523Z node 25 :CMS DEBUG: TTxRemovePermissions Complete 2025-07-08T11:58:48.900612Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2025-07-08T11:58:48.913407Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-07-08T11:58:48.913462Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-07-08T11:58:48.913480Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-07-08T11:58:48.913747Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 } Actions { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. Down: " } } PartialPermissionAllowed: false Schedule: true Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: false 2025-07-08T11:58:48.913760Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 2025-07-08T11:58:48.913774Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 34, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-07-08T11:58:48.913817Z node 25 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:48.913834Z node 25 :CMS DEBUG: Checking action: Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 Issue { Type: TOO_MANY_UNAVAILABLE_VDISKS Message: "Issue in affected group with id \'0\': too many unavailable vdisks. Locked: Host ::1:12001 (25) has planned shutdown (permission user-p-1 owned by user), VDisk [0:1:0:1:0] (::1:/pdisk.data) is locked by this request. 
Down: " } 2025-07-08T11:58:48.913839Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-07-08T11:58:48.913868Z node 25 :CMS DEBUG: Result: ALLOW 2025-07-08T11:58:48.913910Z node 25 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user 2025-07-08T11:58:48.913919Z node 25 :CMS INFO: Adding lock for Host ::1:12010 (34) (permission user-p-2 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:48.913925Z node 25 :CMS DEBUG: Accepting permission: id# user-p-3, requestId# user-r-2, owner# user 2025-07-08T11:58:48.913931Z node 25 :CMS INFO: Adding lock for Host ::1:12002 (26) (permission user-p-3 until 1970-01-01T00:03:00Z) 2025-07-08T11:58:48.913943Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2025-07-08T11:58:48.913989Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:03:00.333560Z, action# Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 2025-07-08T11:58:48.913999Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-3, validity# 1970-01-01T00:03:00.333560Z, action# Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 2025-07-08T11:58:48.914008Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-2, owner# user 2025-07-08T11:58:48.925107Z node 25 :CMS DEBUG: TTxStorePermissions complete 2025-07-08T11:58:48.925219Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-2" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } Permissions { Id: "user-p-2" Action { Type: SHUTDOWN_HOST Host: "34" Duration: 60000000 } Deadline: 180333560 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 34 InterconnectPort: 12010 } } } Permissions { Id: "user-p-3" Action { Type: SHUTDOWN_HOST Host: "26" Duration: 60000000 } Deadline: 180333560 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 26 InterconnectPort: 12002 } } } } 2025-07-08T11:58:48.925373Z node 25 :CMS INFO: Get selected requests for user 2025-07-08T11:58:48.925385Z node 25 :CMS DEBUG: Resulting status: WRONG_REQUEST Unknown request user-r-2 2025-07-08T11:58:48.925402Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManageRequestRequest { User: "user" Command: GET RequestId: "user-r-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManageRequestResponse { Status { Code: WRONG_REQUEST Reason: "Unknown request user-r-2" } } |64.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |64.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::CreateDroppedExternalTableWithReboots >> TKesusTest::TestAcquireDowngrade [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout |64.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |64.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |64.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |64.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::DropReplacedExternalTableWithReboots >> 
TExternalTableTestReboots::SimpleDropExternalTableWithReboots2 |64.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest |64.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::ParallelCreateDrop >> TColumnShardTestReadWrite::WriteOverload+InStore |64.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot >> TColumnShardTestReadWrite::RebootWriteRead >> TColumnShardTestReadWrite::Write >> TColumnShardTestReadWrite::WriteReadStandalone >> TColumnShardTestReadWrite::WriteRead >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 >> TColumnShardTestReadWrite::CompactionInGranule_PKString >> TColumnShardTestReadWrite::WriteReadDuplicate >> test.py::test[window-win_func_over_group_by--Results] [GOOD] >> test.py::test[window-win_multiaggr-default.txt-Results] >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime >> TColumnShardTestReadWrite::ReadStale |64.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> test_watermarks.py::TestWatermarks::test_idle_watermarks[v1-mvp_external_ydb_endpoint0] >> TColumnShardTestReadWrite::RebootWriteReadStandalone >> TColumnShardTestReadWrite::ReadStale [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_3 [GOOD] >> EvWrite::WriteWithSplit >> TColumnShardTestReadWrite::Write [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_4 |64.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadStale [GOOD] Test command err: 2025-07-08T11:58:54.021516Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:58:54.025612Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:58:54.025686Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:58:54.026422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:58:54.026508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:54.026556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:54.026582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:54.026599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:54.026617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:58:54.026635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:54.026655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:54.026671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:58:54.026689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:54.026707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:58:54.026727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:58:54.032240Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:58:54.032408Z node 1 :TX_COLUMNSHARD 
INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:58:54.032418Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:58:54.032444Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:54.032485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:58:54.032498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:58:54.032502Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:58:54.032509Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:58:54.032515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:58:54.032521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:58:54.032524Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:58:54.032541Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:54.032550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:58:54.032558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:58:54.032563Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:58:54.032571Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:58:54.032576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:58:54.032581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:58:54.032584Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2025-07-08T11:58:54.032590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:58:54.032595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:58:54.032598Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:58:54.032614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:58:54.032620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:58:54.032623Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:58:54.032645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:58:54.032654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:58:54.032658Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:58:54.032671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:58:54.032676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:58:54.032678Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:58:54.032684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:58:54.032688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:58:54.032693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:58:54.032696Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:58:54.032729Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=9; 2025-07-08T11:58:54.032739Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=5; 2025-07-08T11:58:54.032748Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=5; 2025-07-08T11:58:54.032760Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=8; 2025-07-08T11:58:54.032771Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:58:54.032782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:58:54.032788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:58:54.032791Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:58:54.032800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:58:54.032804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;eve ... r 0 2025-07-08T11:58:54.521627Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] execute at tablet 9437184 2025-07-08T11:58:54.521691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=1;result=not_found; 2025-07-08T11:58:54.521728Z node 1 :TX_COLUMNSHARD INFO: EnsureTable for pathId: {internal: 9438184000001, ss: 1} ttl settings: { Version: 1 } at tablet 9437184 2025-07-08T11:58:54.522912Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:471;event=OnTieringModified;new_count_tierings=0; 2025-07-08T11:58:54.522967Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:304;method=RegisterTable;path_id=9438184000001; 2025-07-08T11:58:54.522980Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:144;event=RegisterTable;path_id=9438184000001; 2025-07-08T11:58:54.523794Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:463;event=OnTieringModified;path_id=9438184000001; 2025-07-08T11:58:54.523831Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tx_controller.cpp:215;event=finished_tx;tx_id=10; 2025-07-08T11:58:54.544930Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] complete at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=54320;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=54320;columns=10; 2025-07-08T11:58:54.546778Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: 
tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:210;event=register_operation;operation_id=1;last=1; 2025-07-08T11:58:54.546795Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=write_queue.cpp:14;writing_size=54320;operation_id=ebd78120-5bf211f0-8c8fb323-b9b1bf4d;in_flight=1;size_in_flight=54320; 2025-07-08T11:58:54.549126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:125:2157];write_id=1;path_id={internal: 9438184000001, ss: 1};entity_id=3;size=11104;limit=10240;r_count=999;fline=column_info.h:139;sizes=5552,5552;s_splitted=5616,5720;r_splitted=499,500; 2025-07-08T11:58:54.549316Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:125:2157];write_id=1;path_id={internal: 9438184000001, ss: 1};fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=74272;count=11;actions=__DEFAULT,;waiting=1;; 2025-07-08T11:58:54.550703Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:105;writing_size=54320;event=data_write_finished;writing_id=ebd78120-5bf211f0-8c8fb323-b9b1bf4d; 2025-07-08T11:58:54.550793Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=86;data_size=62;sum=86;count=1; 2025-07-08T11:58:54.550811Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:65;memory_size=182;data_size=174;sum=182;count=2;size_of_meta=112; 2025-07-08T11:58:54.550824Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:40;memory_size=254;data_size=246;sum=254;count=1;size_of_portion=184; 2025-07-08T11:58:54.551066Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 11 2025-07-08T11:58:54.551102Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:156;event=add_by_insert_id;id=2;operation_id=1; 2025-07-08T11:58:54.563767Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager 
at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 11 2025-07-08T11:58:54.574961Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1751975935027 at tablet 9437184, mediator 0 2025-07-08T11:58:54.574997Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[6] execute at tablet 9437184 2025-07-08T11:58:54.575067Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=100;fline=abstract.h:83;progress_tx_id=100;lock_id=1;broken=0; 2025-07-08T11:58:54.575113Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=100;fline=tx_controller.cpp:215;event=finished_tx;tx_id=100; 2025-07-08T11:58:54.586055Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[6] complete at tablet 9437184 2025-07-08T11:58:54.586094Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:93;progress_tx_id=100;lock_id=1;broken=0; 2025-07-08T11:58:54.586156Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=100;commit_lock_id=1;fline=manager.cpp:177;event=remove_by_insert_id;id=2;operation_id=1; 2025-07-08T11:58:54.586166Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=100;commit_lock_id=1;fline=manager.cpp:180;event=remove_operation;operation_id=1; 2025-07-08T11:58:54.586419Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:234;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-07-08T11:58:54.586431Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:58:54.586467Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=0; 2025-07-08T11:58:54.588620Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:58:54.588639Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:58:54.588647Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:58:54.588672Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; 2025-07-08T11:58:54.588805Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 1 version: {1751975575027:max} readable: {1751975935027:max} at tablet 9437184 2025-07-08T11:58:54.599733Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 1 at tablet 9437184 2025-07-08T11:58:54.599786Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={1751975575027:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=constructor.cpp:18;event=overriden_columns;ids=1,2,3,4,5,6,7,8,9,10,4294967040,4294967041,4294967042,4294967043; 2025-07-08T11:58:54.599804Z node 1 :TX_COLUMNSHARD_SCAN WARN: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={1751975575027:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:14;event=TTxScan failed;problem=cannot build metadata withno ranges;details=Snapshot too old: {1751975575027:max}. CS min read snapshot: {1751975635027:max}. now: 2025-07-08T11:58:54.599799Z; 2025-07-08T11:58:54.602466Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1751975575027:max} readable: {1751975935027:max} at tablet 9437184 2025-07-08T11:58:54.613181Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-07-08T11:58:54.613708Z node 1 :TX_COLUMNSHARD DEBUG: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1751975575027:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 6 } } } ; 2025-07-08T11:58:54.613725Z node 1 :TX_COLUMNSHARD DEBUG: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1751975575027:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:96;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 6 } } } ; 2025-07-08T11:58:54.613914Z node 1 :TX_COLUMNSHARD DEBUG: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1751975575027:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:44;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":4,"inputs":[{"from":5}]},{"owner_id":5,"inputs":[{"from":6}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"timestamp","id":1},{"name":"message","id":6}]},"o":"0","t":"ReserveMemory"},"w":0,"id":6},"5":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1},{"name":"message","id":6}]},"o":"1,6","t":"FetchOriginalData"},"w":4,"id":5},"4":{"p":{"i":"6","p":{"address":{"name":"message","id":6}},"o":"6","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"1,6","t":"Projection"},"w":18,"id":0}}}; 2025-07-08T11:58:54.613938Z node 1 :TX_COLUMNSHARD_SCAN WARN: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1751975575027:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:14;event=TTxScan failed;problem=cannot build metadata withno ranges;details=Snapshot too old: {1751975575027:max}. CS min read snapshot: {1751975635027:max}. 
now: 2025-07-08T11:58:54.613932Z; >> TColumnShardTestReadWrite::WriteReadStandalone [GOOD] >> TColumnShardTestReadWrite::WriteRead [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::Write [GOOD] Test command err: 2025-07-08T11:58:53.387726Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:58:53.390949Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:58:53.391014Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:58:53.391494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:58:53.391537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:53.391568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:53.391588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:53.391599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:53.391611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:58:53.391625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:53.391635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:53.391645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:58:53.391656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.391666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:58:53.391680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:58:53.409314Z node 1 
:TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:58:53.409421Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:58:53.409435Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:58:53.409480Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:53.409530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:58:53.409544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:58:53.409550Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:58:53.409560Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:58:53.409570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:58:53.409578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:58:53.409582Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:58:53.409600Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:53.409608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:58:53.409616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:58:53.409620Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:58:53.409630Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:58:53.409637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:58:53.409644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:58:53.409648Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:58:53.409657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:58:53.409665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:58:53.409671Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:58:53.409696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:58:53.409703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:58:53.409707Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:58:53.409728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:58:53.409736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:58:53.409740Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:58:53.409752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:58:53.409760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.409763Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.409771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:58:53.409778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:58:53.409785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:58:53.409789Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:58:53.409834Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=14; 2025-07-08T11:58:53.409844Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=5; 2025-07-08T11:58:53.409853Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=5; 2025-07-08T11:58:53.409865Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=8; 2025-07-08T11:58:53.409876Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:58:53.409890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:58:53.409901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:58:53.409907Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:58:53.409921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:58:53.409927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;ev ... [{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"19,19,19,19,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"20,20,20,20,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"21,21,21,21,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"22,22,22,22,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"23,23,23,23,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"24,24,24,24,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"25,25,25,25,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"26,26,26,26,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"27,27,27,27,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"28,28,28,28,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"29,29,29,29,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"30,30,30,30,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"
id":1}]},"p":{"include":0,"pk":"31,31,31,31,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"32,32,32,32,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"33,33,33,33,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"34,34,34,34,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"35,35,35,35,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"36,36,36,36,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"37,37,37,37,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"38,38,38,38,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"39,39,39,39,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"40,40,40,40,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"41,41,41,41,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"42,42,42,42,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"43,43,43,43,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"44,44,44,44,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"45,45,45,45,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"46,46,46,46,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"47,47,47,47,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"48,48,48,48,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"49,49,49,49,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"50,50,50,50,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"51,51,51,51,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"52,52,52,52,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"53,53,53,53,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"54,54,54,54,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":
{"count_include":1},"id":1}]},"p":{"include":0,"pk":"55,55,55,55,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"56,56,56,56,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"57,57,57,57,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"58,58,58,58,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"59,59,59,59,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"60,60,60,60,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"61,61,61,61,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"62,62,62,62,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"63,63,63,63,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"64,64,64,64,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"65,65,65,65,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"66,66,66,66,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"67,67,67,67,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"68,68,68,68,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"69,69,69,69,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"70,70,70,70,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"71,71,71,71,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"72,72,72,72,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"73,73,73,73,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"74,74,74,74,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"75,75,75,75,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"76,76,76,76,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"77,77,77,77,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"78,78,78,78,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}
],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"79,79,79,79,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"80,80,80,80,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"81,81,81,81,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"82,82,82,82,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"83,83,83,83,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"84,84,84,84,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"85,85,85,85,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"86,86,86,86,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"87,87,87,87,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"88,88,88,88,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"89,89,89,89,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"90,90,90,90,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"91,91,91,91,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"92,92,92,92,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"93,93,93,93,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"94,94,94,94,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"95,95,95,95,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"96,96,96,96,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"97,97,97,97,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"98,98,98,98,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"99,99,99,99,"}}]}; >> test.py::test[pg-tpch-q22-default.txt-Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadStandalone [GOOD] Test command err: 2025-07-08T11:58:53.375614Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:58:53.378843Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:58:53.378903Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:58:53.379642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:58:53.379704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:53.379747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:53.379768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:53.379785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:53.379805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:58:53.379829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:53.379849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:53.379872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:58:53.379895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.379920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:58:53.379955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:58:53.385196Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:58:53.385370Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:58:53.385382Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:58:53.385414Z 
node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:53.385463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:58:53.385477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:58:53.385485Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:58:53.385495Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:58:53.385506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:58:53.385513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:58:53.385518Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:58:53.385539Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:53.385546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:58:53.385553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:58:53.385557Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:58:53.385566Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:58:53.385573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:58:53.385582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:58:53.385586Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:58:53.385595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:58:53.385602Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:58:53.385606Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:58:53.385629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:58:53.385636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:58:53.385639Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:58:53.385657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:58:53.385665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:58:53.385669Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:58:53.385682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:58:53.385689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.385693Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.385701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:58:53.385710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:58:53.385717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:58:53.385721Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:58:53.385757Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=8; 2025-07-08T11:58:53.385769Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=8; 2025-07-08T11:58:53.385777Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2025-07-08T11:58:53.385788Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=7; 2025-07-08T11:58:53.385798Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:58:53.385811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:58:53.385818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:58:53.385823Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:58:53.385836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:58:53.385841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;eve ... ge=start;iterator=ready_results:(count:1;records_count:31;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:55.191998Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-07-08T11:58:55.192008Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:238;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-07-08T11:58:55.192017Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:258;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-07-08T11:58:55.192046Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:391:2407];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-07-08T11:58:55.192058Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:278;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:55.192068Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:55.192078Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:55.192102Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T11:58:55.192110Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:55.192118Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:55.192123Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:392:2408] finished for tablet 9437184 2025-07-08T11:58:55.192167Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:425;event=scan_finish;compute_actor_id=[1:391:2407];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1751975935190541,"name":"_full_task","f":1751975935190541,"d_finished":0,"c":0,"l":1751975935192129,"d":1588},"events":[{"name":"bootstrap","f":1751975935190576,"d_finished":323,"c":1,"l":1751975935190899,"d":323},{"a":1751975935192100,"name":"ack","f":1751975935191970,"d_finished":111,"c":1,"l":1751975935192081,"d":140},{"a":1751975935192098,"name":"processing","f":1751975935191039,"d_finished":650,"c":10,"l":1751975935192081,"d":681},{"name":"ProduceResults","f":1751975935190759,"d_finished":308,"c":13,"l":1751975935192121,"d":308},{"a":1751975935192121,"name":"Finish","f":1751975935192121,"d_finished":0,"c":0,"l":1751975935192129,"d":8},{"name":"task_result","f":1751975935191042,"d_finished":523,"c":9,"l":1751975935191944,"d":523}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:55.192176Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:391:2407];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-07-08T11:58:55.192209Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:379;event=scan_finished;compute_actor_id=[1:391:2407];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1751975935190541,"name":"_full_task","f":1751975935190541,"d_finished":0,"c":0,"l":1751975935192181,"d":1640},"events":[{"name":"bootstrap","f":1751975935190576,"d_finished":323,"c":1,"l":1751975935190899,"d":323},{"a":1751975935192100,"name":"ack","f":1751975935191970,"d_finished":111,"c":1,"l":1751975935192081,"d":192},{"a":1751975935192098,"name":"processing","f":1751975935191039,"d_finished":650,"c":10,"l":1751975935192081,"d":733},{"name":"ProduceResults","f":1751975935190759,"d_finished":308,"c":13,"l":1751975935192121,"d":308},{"a":1751975935192121,"name":"Finish","f":1751975935192121,"d_finished":0,"c":0,"l":1751975935192181,"d":60},{"name":"task_result","f":1751975935191042,"d_finished":523,"c":9,"l":1751975935191944,"d":523}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:55.192219Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-07-08T11:58:55.190450Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7600;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7600;selected_rows=0; 2025-07-08T11:58:55.192224Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:192;event=scan_aborted;reason=unexpected on destructor; 2025-07-08T11:58:55.192251Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> test.py::test[sampling-bind_expr-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteRead [GOOD] Test command err: 2025-07-08T11:58:53.481087Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 
2025-07-08T11:58:53.485057Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:58:53.485116Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:58:53.485803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:58:53.485859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:53.485894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:53.485915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:53.485931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:53.485950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:58:53.485967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:53.485984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:53.486000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:58:53.486015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.486032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:58:53.486049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:58:53.491906Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:58:53.492078Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:58:53.492091Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:58:53.492121Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:53.492167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:58:53.492179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:58:53.492185Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:58:53.492194Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:58:53.492203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:58:53.492209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:58:53.492214Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:58:53.492236Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:53.492244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:58:53.492250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:58:53.492255Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:58:53.492263Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:58:53.492270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:58:53.492277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:58:53.492282Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:58:53.492290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:58:53.492296Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:58:53.492302Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:58:53.492324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:58:53.492331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:58:53.492335Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:58:53.492353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:58:53.492359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:58:53.492363Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:58:53.492375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:58:53.492381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.492385Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.492393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:58:53.492400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:58:53.492406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:58:53.492410Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:58:53.492448Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=11; 2025-07-08T11:58:53.492458Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=6; 2025-07-08T11:58:53.492466Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2025-07-08T11:58:53.492477Z node 
1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=7; 2025-07-08T11:58:53.492487Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:58:53.492498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:58:53.492507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:58:53.492512Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:58:53.492523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:58:53.492529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;ev ... tage=start;iterator=ready_results:(count:1;records_count:31;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:55.262778Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-07-08T11:58:55.262787Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:238;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-07-08T11:58:55.262796Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:258;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-07-08T11:58:55.262827Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:391:2407];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-07-08T11:58:55.262836Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:278;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:55.262844Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:55.262852Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:55.262870Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T11:58:55.262876Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:55.262881Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:55.262885Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:392:2408] finished for tablet 9437184 2025-07-08T11:58:55.262918Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:425;event=scan_finish;compute_actor_id=[1:391:2407];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1751975935261552,"name":"_full_task","f":1751975935261552,"d_finished":0,"c":0,"l":1751975935262889,"d":1337},"events":[{"name":"bootstrap","f":1751975935261589,"d_finished":285,"c":1,"l":1751975935261874,"d":285},{"a":1751975935262868,"name":"ack","f":1751975935262757,"d_finished":97,"c":1,"l":1751975935262854,"d":118},{"a":1751975935262867,"name":"processing","f":1751975935261967,"d_finished":555,"c":10,"l":1751975935262854,"d":577},{"name":"ProduceResults","f":1751975935261775,"d_finished":247,"c":13,"l":1751975935262883,"d":247},{"a":1751975935262883,"name":"Finish","f":1751975935262883,"d_finished":0,"c":0,"l":1751975935262889,"d":6},{"name":"task_result","f":1751975935261969,"d_finished":447,"c":9,"l":1751975935262735,"d":447}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:55.262925Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:391:2407];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-07-08T11:58:55.262946Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:379;event=scan_finished;compute_actor_id=[1:391:2407];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1751975935261552,"name":"_full_task","f":1751975935261552,"d_finished":0,"c":0,"l":1751975935262928,"d":1376},"events":[{"name":"bootstrap","f":1751975935261589,"d_finished":285,"c":1,"l":1751975935261874,"d":285},{"a":1751975935262868,"name":"ack","f":1751975935262757,"d_finished":97,"c":1,"l":1751975935262854,"d":157},{"a":1751975935262867,"name":"processing","f":1751975935261967,"d_finished":555,"c":10,"l":1751975935262854,"d":616},{"name":"ProduceResults","f":1751975935261775,"d_finished":247,"c":13,"l":1751975935262883,"d":247},{"a":1751975935262883,"name":"Finish","f":1751975935262883,"d_finished":0,"c":0,"l":1751975935262928,"d":45},{"name":"task_result","f":1751975935261969,"d_finished":447,"c":9,"l":1751975935262735,"d":447}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:55.262954Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-07-08T11:58:55.261443Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7600;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7600;selected_rows=0; 2025-07-08T11:58:55.262959Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:192;event=scan_aborted;reason=unexpected on destructor; 2025-07-08T11:58:55.262978Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TTopicApiDescribes::GetLocalDescribe [GOOD] >> TColumnShardTestReadWrite::RebootWriteRead [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetLocalDescribe [GOOD] Test command err: 2025-07-08T11:58:49.913263Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679320960667981:2148];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:49.913644Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T11:58:49.927144Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679318946670332:2234];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:49.953438Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000b09/r3tmp/tmp5ICBso/pdisk_1.dat 2025-07-08T11:58:49.965595Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T11:58:49.967111Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T11:58:50.000918Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:58:50.011943Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:50.011967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:50.014085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19308, node 1 2025-07-08T11:58:50.025144Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/43nv/000b09/r3tmp/yandexosRGKB.tmp 2025-07-08T11:58:50.025154Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/43nv/000b09/r3tmp/yandexosRGKB.tmp 2025-07-08T11:58:50.025194Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/43nv/000b09/r3tmp/yandexosRGKB.tmp 2025-07-08T11:58:50.025232Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T11:58:50.030820Z INFO: TTestServer started on Port 15047 GrpcPort 19308 TClient is connected to server localhost:15047 PQClient connected to localhost:19308 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:58:50.058159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:58:50.067948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:50.067970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:50.069071Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-07-08T11:58:50.069462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T11:58:50.093446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-07-08T11:58:50.196797Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976720661, at schemeshard: 72057594046644480 2025-07-08T11:58:50.327489Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7524679325255636121:2293], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T11:58:50.327869Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTE1YTIzNDUtNTVkYjdlYTktMzg3OWEzM2YtZjIzZDdhZGI=, ActorId: [1:7524679325255636118:2291], ActorState: ExecuteState, TraceId: 01jzmyeadx00wbjbs98xk64s8k, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T11:58:50.328275Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T11:58:50.327562Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7524679323241637774:2267], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T11:58:50.327916Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NWIyNDY0NmQtZjk0MmE3MjItZTMyMWMxNDUtMTQ5NDAwYzM=, ActorId: [2:7524679323241637772:2266], ActorState: ExecuteState, TraceId: 01jzmyeadk7tstsb2c13hfdgay, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T11:58:50.328276Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T11:58:50.332780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-07-08T11:58:50.369589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-07-08T11:58:50.395441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-07-08T11:58:50.440680Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720665. Ctx: { TraceId: 01jzmyeahkey55kezn1kp1q7ek, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTE3Y2E2NDQtMWM4YjQ1MzktZDk5OTljMmEtM2E2MjhhZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7524679325255636543:2978] 2025-07-08T11:58:50.910389Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:58:50.913930Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:58:54.912662Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7524679320960667981:2148];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:54.912701Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-07-08T11:58:54.920335Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7524679318946670332:2234];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:54.920371Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok CreateTopicNoLegacy: rt3.dc1--topic-x Create topic: /Root/PQ/rt3.dc1--topic-x AddTopic: rt3.dc1--topic-x ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = topic-x, dc = dc1 2025-07-08T11:58:55.503454Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2025-07-08T11:58:55.519086Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720678. Ctx: { TraceId: 01jzmyefggd1w7za6ztn4ykvyh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWM1ODdiNjAtNGUyMDdiOTQtYmI0YzA0N2QtOWNlMzAyZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:55.520325Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][] pipe [1:7524679346730473664:3359] connected; active server actors: 1 2025-07-08T11:58:55.520511Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037896] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-07-08T11:58:55.520516Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037899] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-07-08T11:58:55.520523Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037899] doesn't have tx writes info 2025-07-08T11:58:55.520532Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896] doesn't have tx writes info 2025-07-08T11:58:55.520558Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037898] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-07-08T11:58:55.520561Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898] doesn't have tx writes info 2025-07-08T11:58:55.520577Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037893] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-07-08T11:58:55.520585Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893] doesn't have tx writes info 2025-07-08T11:58:55.520585 ... 
nected TabletId 72057594046644480, NodeId 1, Generation 2 ===Query complete 2025-07-08T11:58:55.529721Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 14, State: StateInit] bootstrapping 14 [1:7524679346730473995:2443] 2025-07-08T11:58:55.529830Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 11, State: StateInit] bootstrapping 11 [1:7524679346730473997:2445] 2025-07-08T11:58:55.532464Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 11, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 11 generation 1 [1:7524679346730473997:2445] 2025-07-08T11:58:55.532577Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 14, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 14 generation 1 [1:7524679346730473995:2443] 2025-07-08T11:58:55.532836Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 1, State: StateInit] bootstrapping 1 [1:7524679346730473994:2442] 2025-07-08T11:58:55.530384Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 3, State: StateInit] bootstrapping 3 [2:7524679344716474930:2360] 2025-07-08T11:58:55.530497Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] bootstrapping 0 [2:7524679344716474932:2362] 2025-07-08T11:58:55.533004Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 6, State: StateInit] bootstrapping 6 [1:7524679346730473996:2444] 2025-07-08T11:58:55.531787Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 0 generation 1 [2:7524679344716474932:2362] 2025-07-08T11:58:55.531812Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 3 generation 1 [2:7524679344716474930:2360] 2025-07-08T11:58:55.533363Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 1 generation 1 [1:7524679346730473994:2442] 2025-07-08T11:58:55.533542Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 6, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 6 generation 1 [1:7524679346730473996:2444] 2025-07-08T11:58:55.533923Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037899, Partition: 4, State: StateInit] bootstrapping 4 [1:7524679346730474022:2448] 2025-07-08T11:58:55.534393Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037899, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 4 generation 1 [1:7524679346730474022:2448] 2025-07-08T11:58:55.534401Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 5, State: StateInit] bootstrapping 5 [1:7524679346730474033:2450] 2025-07-08T11:58:55.534756Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 10, State: StateInit] bootstrapping 10 [1:7524679346730474038:2452] 2025-07-08T11:58:55.535085Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 5, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 5 generation 1 [1:7524679346730474033:2450] 2025-07-08T11:58:55.535246Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 10, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 10 generation 1 [1:7524679346730474038:2452] 2025-07-08T11:58:55.540064Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037899] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-07-08T11:58:55.536443Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 7, 
State: StateInit] bootstrapping 7 [2:7524679344716474948:2366] 2025-07-08T11:58:55.536495Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 13, State: StateInit] bootstrapping 13 [2:7524679344716474950:2368] 2025-07-08T11:58:55.537201Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 13, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 13 generation 1 [2:7524679344716474950:2368] 2025-07-08T11:58:55.537359Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 7, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 7 generation 1 [2:7524679344716474948:2366] 2025-07-08T11:58:55.537610Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 9, State: StateInit] bootstrapping 9 [2:7524679344716474927:2357] 2025-07-08T11:58:55.537827Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 2, State: StateInit] bootstrapping 2 [2:7524679344716474928:2358] 2025-07-08T11:58:55.538010Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 9, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 9 generation 1 [2:7524679344716474927:2357] 2025-07-08T11:58:55.538403Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 2 generation 1 [2:7524679344716474928:2358] 2025-07-08T11:58:55.538979Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 8, State: StateInit] bootstrapping 8 [2:7524679344716474931:2361] 2025-07-08T11:58:55.539139Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] bootstrapping 12 [2:7524679344716474934:2364] 2025-07-08T11:58:55.539514Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 8, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 8 generation 1 [2:7524679344716474931:2361] 2025-07-08T11:58:55.539592Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 12 generation 1 [2:7524679344716474934:2364] 2025-07-08T11:58:55.540120Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037898] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-07-08T11:58:55.540146Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037893] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-07-08T11:58:55.540996Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-07-08T11:58:55.541139Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037897] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-07-08T11:58:55.541645Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037895] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-07-08T11:58:55.542523Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-07-08T11:58:55.541074Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037896] disable metering: reason# billing is not enabled in BillingMeteringConfig Create topic result: 1 2025-07-08T11:58:55.546167Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7524679346730474297:3881]: Request location 2025-07-08T11:58:55.546299Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7524679346730474307:3885] connected; active server actors: 1 2025-07-08T11:58:55.546384Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] 
addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 2, Generation 1 2025-07-08T11:58:55.546390Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 1 2025-07-08T11:58:55.546393Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 2, NodeId 2, Generation 1 2025-07-08T11:58:55.546395Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 1 2025-07-08T11:58:55.546396Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037899, partitionId 4, NodeId 1, Generation 1 2025-07-08T11:58:55.546399Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 1, Generation 1 2025-07-08T11:58:55.546401Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 6, NodeId 1, Generation 1 2025-07-08T11:58:55.546403Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 7, NodeId 2, Generation 1 2025-07-08T11:58:55.546405Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 8, NodeId 2, Generation 1 2025-07-08T11:58:55.546408Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 9, NodeId 2, Generation 1 2025-07-08T11:58:55.546410Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 10, NodeId 1, Generation 1 2025-07-08T11:58:55.546413Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 11, NodeId 1, Generation 1 2025-07-08T11:58:55.546415Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 12, NodeId 2, Generation 1 2025-07-08T11:58:55.546418Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 13, NodeId 2, Generation 1 2025-07-08T11:58:55.546420Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 14, NodeId 1, Generation 1 2025-07-08T11:58:55.546449Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7524679346730474297:3881]: Got location 2025-07-08T11:58:55.546527Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7524679346730474307:3885] disconnected; active server actors: 1 2025-07-08T11:58:55.546531Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7524679346730474307:3885] disconnected no session 2025-07-08T11:58:55.546576Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7524679346730474309:3887]: Request location 2025-07-08T11:58:55.546642Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7524679346730474311:3889] connected; active server 
actors: 1 2025-07-08T11:58:55.546688Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 1 2025-07-08T11:58:55.546693Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 1 2025-07-08T11:58:55.546696Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 1, Generation 1 2025-07-08T11:58:55.546728Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7524679346730474309:3887]: Got location 2025-07-08T11:58:55.546823Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7524679346730474312:3890]: Request location 2025-07-08T11:58:55.548500Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7524679346730474311:3889] disconnected; active server actors: 1 2025-07-08T11:58:55.548511Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7524679346730474311:3889] disconnected no session 2025-07-08T11:58:55.548517Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7524679346730474314:3892] connected; active server actors: 1 >> TColumnShardTestReadWrite::ReadAggregate >> EvWrite::WriteWithSplit [GOOD] >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression >> Normalizers::CleanEmptyPortionsNormalizer ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::RebootWriteRead [GOOD] Test command err: 2025-07-08T11:58:53.279018Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:58:53.283106Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:58:53.283168Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:58:53.283911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:58:53.283975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:53.284014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:53.284037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:53.284055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:53.284075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 
2025-07-08T11:58:53.284092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:53.284107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:53.284124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:58:53.284141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.284159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:58:53.284180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:58:53.290407Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:58:53.290782Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:58:53.290795Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:58:53.290843Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:53.290894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:58:53.290910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:58:53.290916Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:58:53.290927Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:58:53.290939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:58:53.290947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:58:53.290952Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:58:53.290983Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:53.290992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:58:53.291000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:58:53.291005Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:58:53.291015Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:58:53.291022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:58:53.291030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:58:53.291034Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:58:53.291044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:58:53.291052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:58:53.291056Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:58:53.291084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:58:53.291092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:58:53.291097Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:58:53.291118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:58:53.291126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:58:53.291131Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:58:53.291146Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:58:53.291154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.291159Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.291167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:58:53.291174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:58:53.291181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:58:53.291186Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:58:53.291231Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=11; 2025-07-08T11:58:53.291241Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=6; 2025-07-08T11:58:53.291252Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=6; 2025-07-08T11:58:53.291265Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=9; 2025-07-08T11:58:53.291275Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:58:53.291288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:58:53.291297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:58:53.291302Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:58:53.291315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:58:53.291322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;ev ... 
_results:(count:1;records_count:31;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:56.183444Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-07-08T11:58:56.183456Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:238;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-07-08T11:58:56.183468Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:258;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-07-08T11:58:56.183508Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:969:2829];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-07-08T11:58:56.183522Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:278;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:56.183534Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:56.183545Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:56.183573Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T11:58:56.183583Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:56.183592Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:56.183597Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:970:2830] finished for tablet 9437184 2025-07-08T11:58:56.183640Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:425;event=scan_finish;compute_actor_id=[1:969:2829];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1751975936181624,"name":"_full_task","f":1751975936181624,"d_finished":0,"c":0,"l":1751975936183604,"d":1980},"events":[{"name":"bootstrap","f":1751975936181671,"d_finished":386,"c":1,"l":1751975936182057,"d":386},{"a":1751975936183571,"name":"ack","f":1751975936183410,"d_finished":138,"c":1,"l":1751975936183548,"d":171},{"a":1751975936183569,"name":"processing","f":1751975936182213,"d_finished":838,"c":10,"l":1751975936183549,"d":873},{"name":"ProduceResults","f":1751975936181894,"d_finished":371,"c":13,"l":1751975936183595,"d":371},{"a":1751975936183595,"name":"Finish","f":1751975936183595,"d_finished":0,"c":0,"l":1751975936183604,"d":9},{"name":"task_result","f":1751975936182216,"d_finished":676,"c":9,"l":1751975936183374,"d":676}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:56.183647Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:969:2829];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-07-08T11:58:56.183671Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:379;event=scan_finished;compute_actor_id=[1:969:2829];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ProduceResults","f_Finish","l_task_result"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1751975936181624,"name":"_full_task","f":1751975936181624,"d_finished":0,"c":0,"l":1751975936183651,"d":2027},"events":[{"name":"bootstrap","f":1751975936181671,"d_finished":386,"c":1,"l":1751975936182057,"d":386},{"a":1751975936183571,"name":"ack","f":1751975936183410,"d_finished":138,"c":1,"l":1751975936183548,"d":218},{"a":1751975936183569,"name":"processing","f":1751975936182213,"d_finished":838,"c":10,"l":1751975936183549,"d":920},{"name":"ProduceResults","f":1751975936181894,"d_finished":371,"c":13,"l":1751975936183595,"d":371},{"a":1751975936183595,"name":"Finish","f":1751975936183595,"d_finished":0,"c":0,"l":1751975936183651,"d":56},{"name":"task_result","f":1751975936182216,"d_finished":676,"c":9,"l":1751975936183374,"d":676}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:56.183679Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-07-08T11:58:56.181500Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7600;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7600;selected_rows=0; 2025-07-08T11:58:56.183684Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:192;event=scan_aborted;reason=unexpected on destructor; 2025-07-08T11:58:56.183706Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TTopicApiDescribes::DescribeConsumer [GOOD] >> Normalizers::EmptyTablesNormalizer ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::WriteWithSplit [GOOD] Test command err: 2025-07-08T11:58:55.271294Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:124:2156];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:58:55.275047Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:124:2156];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:58:55.275117Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:58:55.275899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:58:55.275961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:55.276000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:55.276016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:55.276028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:55.276040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:58:55.276052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:55.276065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:55.276076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:58:55.276087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:55.276099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:58:55.276114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:58:55.282055Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:58:55.282132Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:58:55.282141Z node 1 
:TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:58:55.282175Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:55.282231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:58:55.282243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:58:55.282247Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:58:55.282253Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:58:55.282258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:58:55.282264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:58:55.282267Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:58:55.282278Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:55.282300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:58:55.282308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:58:55.282312Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:58:55.282321Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:58:55.282328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:58:55.282335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:58:55.282337Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:58:55.282346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:58:55.282354Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:58:55.282358Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:58:55.282385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:58:55.282392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:58:55.282397Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:58:55.282415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:58:55.282421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:58:55.282423Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:58:55.282436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:58:55.282443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:58:55.282448Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:58:55.282456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:58:55.282464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:58:55.282471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:58:55.282475Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:58:55.282518Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=11; 2025-07-08T11:58:55.282528Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=6; 2025-07-08T11:58:55.282551Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=19; 
2025-07-08T11:58:55.282564Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=9; 2025-07-08T11:58:55.282575Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:58:55.282587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:58:55.282595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:58:55.282600Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:58:55.282613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:58:55.282620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;e ... canId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T11:58:56.296576Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:851:2868];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-07-08T11:58:56.296584Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:851:2868];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-07-08T11:58:56.296592Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:851:2868];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:223;stage=no data is ready yet;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-07-08T11:58:56.296701Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:851:2868];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:88;event=TEvTaskProcessedResult; 2025-07-08T11:58:56.296708Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:851:2868];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=merge.cpp:75;event=DoApply;interval_idx=0; 2025-07-08T11:58:56.296715Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:851:2868];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=1; 2025-07-08T11:58:56.296731Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:851:2868];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=1024;merger=0;interval_id=1; 2025-07-08T11:58:56.296738Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:851:2868];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-07-08T11:58:56.296747Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:851:2868];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-07-08T11:58:56.296753Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:851:2868];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=1;count=1024;finished=1; 2025-07-08T11:58:56.296762Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:851:2868];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:238;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;);columns=2;rows=1024; 2025-07-08T11:58:56.296777Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:851:2868];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:258;stage=data_format;batch_size=8400896;num_rows=1024;batch_columns=key,field; 2025-07-08T11:58:56.296828Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:851:2868];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:845:2862];bytes=8400896;rows=1024;faults=0;finished=0;fault=0;schema=key: uint64 field: string; 2025-07-08T11:58:56.296842Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:851:2868];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:278;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-07-08T11:58:56.296853Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:851:2868];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-07-08T11:58:56.296862Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:851:2868];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-07-08T11:58:56.298450Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:851:2868];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T11:58:56.298478Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:851:2868];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-07-08T11:58:56.298487Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:851:2868];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-07-08T11:58:56.298495Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:851:2868] finished for tablet 9437184 2025-07-08T11:58:56.298590Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:851:2868];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:425;event=scan_finish;compute_actor_id=[1:845:2862];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_processing","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.092},{"events":["l_task_result"],"t":0.111},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.113}],"full":{"a":1751975936185456,"name":"_full_task","f":1751975936185456,"d_finished":0,"c":0,"l":1751975936298507,"d":113051},"events":[{"name":"bootstrap","f":1751975936185541,"d_finished":371,"c":1,"l":1751975936185912,"d":371},{"a":1751975936298441,"name":"ack","f":1751975936277743,"d_finished":17371,"c":2,"l":1751975936296603,"d":17437},{"a":1751975936298433,"name":"processing","f":1751975936186758,"d_finished":28094,"c":11,"l":1751975936296875,"d":28168},{"name":"ProduceResults","f":1751975936185777,"d_finished":17656,"c":15,"l":1751975936298491,"d":17656},{"a":1751975936298493,"name":"Finish","f":1751975936298493,"d_finished":0,"c":0,"l":1751975936298507,"d":14},{"name":"task_result","f":1751975936186764,"d_finished":10664,"c":9,"l":1751975936296874,"d":10664}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-07-08T11:58:56.298603Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:851:2868];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:845:2862];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-07-08T11:58:56.298644Z node 1 
:TX_COLUMNSHARD_SCAN INFO: SelfId=[1:851:2868];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:379;event=scan_finished;compute_actor_id=[1:845:2862];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_processing","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.092},{"events":["l_task_result"],"t":0.111},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.113}],"full":{"a":1751975936185456,"name":"_full_task","f":1751975936185456,"d_finished":0,"c":0,"l":1751975936298609,"d":113153},"events":[{"name":"bootstrap","f":1751975936185541,"d_finished":371,"c":1,"l":1751975936185912,"d":371},{"a":1751975936298441,"name":"ack","f":1751975936277743,"d_finished":17371,"c":2,"l":1751975936296603,"d":17539},{"a":1751975936298433,"name":"processing","f":1751975936186758,"d_finished":28094,"c":11,"l":1751975936296875,"d":28270},{"name":"ProduceResults","f":1751975936185777,"d_finished":17656,"c":15,"l":1751975936298491,"d":17656},{"a":1751975936298493,"name":"Finish","f":1751975936298493,"d_finished":0,"c":0,"l":1751975936298609,"d":116},{"name":"task_result","f":1751975936186764,"d_finished":10664,"c":9,"l":1751975936296874,"d":10664}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-07-08T11:58:56.298665Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:851:2868];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-07-08T11:58:56.185290Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=17137952;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=17137952;selected_rows=0; 2025-07-08T11:58:56.298671Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:851:2868];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:192;event=scan_aborted;reason=unexpected on destructor; 2025-07-08T11:58:56.298721Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:851:2868];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; >> TColumnShardTestReadWrite::RebootWriteReadStandalone [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 >> TTopicApiDescribes::GetPartitionDescribe [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeConsumer [GOOD] Test command err: 2025-07-08T11:58:49.808490Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679322534159476:2236];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:49.808558Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2025-07-08T11:58:49.811568Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679319027013308:2246];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:49.811649Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T11:58:49.838999Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T11:58:49.841876Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000aca/r3tmp/tmpksPyc4/pdisk_1.dat 2025-07-08T11:58:49.873777Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20549, node 1 2025-07-08T11:58:49.893255Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/43nv/000aca/r3tmp/yandex2nacko.tmp 2025-07-08T11:58:49.893269Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/43nv/000aca/r3tmp/yandex2nacko.tmp 2025-07-08T11:58:49.896167Z INFO: TTestServer started on Port 18369 GrpcPort 20549 2025-07-08T11:58:49.906506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:49.906541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:49.908844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:58:49.921126Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/43nv/000aca/r3tmp/yandex2nacko.tmp 2025-07-08T11:58:49.921290Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18369 PQClient connected to localhost:20549 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-07-08T11:58:49.950093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T11:58:49.950276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:49.950294Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:49.952197Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-07-08T11:58:49.952564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-07-08T11:58:49.966528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T11:58:50.064856Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976720660, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:50.151788Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7524679323321980705:2267], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T11:58:50.152097Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDEzMmI4NzUtY2MwYjdlNjItNWFmNjBjNTAtNGUzMDY3ZjY=, ActorId: [2:7524679323321980703:2266], ActorState: ExecuteState, TraceId: 01jzmyea8kfgfa2xe3xbq9mw54, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T11:58:50.153449Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T11:58:50.167516Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7524679326829127541:2293], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T11:58:50.167834Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjRjZGUwMzQtZTJhMmJjMS1iNmQ2MjNiNC1lMDQ0ZDBkZA==, ActorId: [1:7524679326829127538:2291], ActorState: ExecuteState, TraceId: 01jzmyea9je6xyz5qjccp66vkw, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T11:58:50.167932Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T11:58:50.170050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-07-08T11:58:50.188767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-07-08T11:58:50.217493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-07-08T11:58:50.265479Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720665. Ctx: { TraceId: 01jzmyeabvch2k65vmmdzt26rb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY1MDI4N2QtNjBhZmQ0NWEtYTBlNTg0N2MtNTAxZWRlMGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7524679326829127964:2991] 2025-07-08T11:58:50.807984Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:58:50.811059Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:58:54.808234Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7524679322534159476:2236];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:54.808275Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-07-08T11:58:54.811308Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7524679319027013308:2246];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:54.811341Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok CreateTopicNoLegacy: rt3.dc1--topic-x Create topic: /Root/PQ/rt3.dc1--topic-x AddTopic: rt3.dc1--topic-x ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = topic-x, dc = dc1 2025-07-08T11:58:55.345327Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2025-07-08T11:58:55.365936Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][] pipe [1:7524679348303965112:3397] connected; active server actors: 1 2025-07-08T11:58:55.367681Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] updating configuration. Deleted partitions []. Added partitions [8, 12, 7, 13, 4, 5, 10, 1, 6, 14, 11, 9, 2, 3, 0] 2025-07-08T11:58:55.367871Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037900 2025-07-08T11:58:55.368527Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037899, NodeId 1, Generation 1 2025-07-08T11:58:55.362926Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720678. Ctx: { TraceId: 01jzmyefbj9yv21r44pnpwat7b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzU3ZDQ5MjItYzJjY2VjY2UtNTY2ZGIyMmItNzEwZTFkYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:55.364723Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037899] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-07-08T11:58:55.364732Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037899] doesn't have tx writes info 2025-07-08T11:58:55.370116Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-07-08T11:58:55.370138Z no ... 
partition_node_id: 1 } partition_consumer_stats { last_read_time { seconds: 1751975935 nanos: 429000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } } } partitions { partition_id: 11 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1751975935 nanos: 426000000 } max_write_time_lag { } bytes_written { } partition_node_id: 1 } partition_consumer_stats { last_read_time { seconds: 1751975935 nanos: 428000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } } } partitions { partition_id: 12 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1751975935 nanos: 429000000 } max_write_time_lag { } bytes_written { } partition_node_id: 2 } partition_consumer_stats { last_read_time { seconds: 1751975935 nanos: 431000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } } } partitions { partition_id: 13 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1751975935 nanos: 428000000 } max_write_time_lag { } bytes_written { } partition_node_id: 2 } partition_consumer_stats { last_read_time { seconds: 1751975935 nanos: 431000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } } } partitions { partition_id: 14 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1751975935 nanos: 426000000 } max_write_time_lag { } bytes_written { } partition_node_id: 1 } partition_consumer_stats { last_read_time { seconds: 1751975935 nanos: 428000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } } } } } } 2025-07-08T11:58:56.383982Z node 1 :PQ_READ_PROXY DEBUG: new Describe consumer request 2025-07-08T11:58:56.384019Z node 1 :PQ_READ_PROXY DEBUG: TDescribeConsumerActor for request path: "/Root/PQ//rt3.dc1--topic-x" consumer: "my-consumer" include_location: true 2025-07-08T11:58:56.384235Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7524679352598934057:2562]: Request location 2025-07-08T11:58:56.384382Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7524679352598934059:2563] connected; active server actors: 1 2025-07-08T11:58:56.384518Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 2, Generation 2 2025-07-08T11:58:56.384536Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 2 2025-07-08T11:58:56.384538Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 2, NodeId 2, Generation 2 2025-07-08T11:58:56.384540Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 2 2025-07-08T11:58:56.384542Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037899, partitionId 4, NodeId 1, Generation 2 2025-07-08T11:58:56.384543Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 1, Generation 2 2025-07-08T11:58:56.384545Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 6, NodeId 1, Generation 2 2025-07-08T11:58:56.384547Z node 2 :PERSQUEUE_READ_BALANCER 
DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 7, NodeId 2, Generation 2 2025-07-08T11:58:56.384549Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 8, NodeId 2, Generation 2 2025-07-08T11:58:56.384550Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 9, NodeId 2, Generation 2 2025-07-08T11:58:56.384551Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 10, NodeId 1, Generation 2 2025-07-08T11:58:56.384553Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 11, NodeId 1, Generation 2 2025-07-08T11:58:56.384555Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 12, NodeId 2, Generation 2 2025-07-08T11:58:56.384556Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 13, NodeId 2, Generation 2 2025-07-08T11:58:56.384558Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 14, NodeId 1, Generation 2 2025-07-08T11:58:56.384619Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7524679352598934057:2562]: Got location 2025-07-08T11:58:56.384710Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7524679352598934059:2563] disconnected; active server actors: 1 2025-07-08T11:58:56.384719Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7524679352598934059:2563] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeConsumerResult] { self { name: "rt3.dc1--topic-x/my-consumer" owner: "root@builtin" type: TOPIC created_at { plan_step: 1751975935423 tx_id: 281474976720677 } } consumer { name: "shared/my-consumer" important: true read_from { } attributes { key: "_service_type" value: "data-streams" } } partitions { active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 2 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 3 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 4 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 5 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 6 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 7 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 8 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 9 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 10 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 11 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 12 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 
13 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 14 active: true partition_location { node_id: 1 generation: 2 } } } } } 2025-07-08T11:58:56.385425Z node 1 :PQ_READ_PROXY DEBUG: new Describe consumer request 2025-07-08T11:58:56.385446Z node 1 :PQ_READ_PROXY DEBUG: TDescribeConsumerActor for request path: "/Root/PQ//rt3.dc1--topic-x" consumer: "my-consumer" Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeConsumerResult] { self { name: "rt3.dc1--topic-x/my-consumer" owner: "root@builtin" type: TOPIC created_at { plan_step: 1751975935423 tx_id: 281474976720677 } } consumer { name: "shared/my-consumer" important: true read_from { } attributes { key: "_service_type" value: "data-streams" } } partitions { active: true } partitions { partition_id: 1 active: true } partitions { partition_id: 2 active: true } partitions { partition_id: 3 active: true } partitions { partition_id: 4 active: true } partitions { partition_id: 5 active: true } partitions { partition_id: 6 active: true } partitions { partition_id: 7 active: true } partitions { partition_id: 8 active: true } partitions { partition_id: 9 active: true } partitions { partition_id: 10 active: true } partitions { partition_id: 11 active: true } partitions { partition_id: 12 active: true } partitions { partition_id: 13 active: true } partitions { partition_id: 14 active: true } } } } 2025-07-08T11:58:56.386123Z node 1 :PQ_READ_PROXY DEBUG: new Describe consumer request 2025-07-08T11:58:56.386141Z node 1 :PQ_READ_PROXY DEBUG: TDescribeConsumerActor for request path: "/Root/PQ//bad-topic" consumer: "my-consumer" include_stats: true include_location: true Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::RebootWriteReadStandalone [GOOD] Test command err: 2025-07-08T11:58:54.369240Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:58:54.371773Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:58:54.371815Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:58:54.372508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:58:54.372565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:54.372599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:54.372617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:54.372631Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:54.372649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:58:54.372665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:54.372682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:54.372698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:58:54.372714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:54.372731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:58:54.372748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:58:54.378085Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:58:54.378317Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:58:54.378329Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:58:54.378370Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:54.378417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:58:54.378433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:58:54.378438Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:58:54.378448Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:58:54.378456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:58:54.378462Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:58:54.378466Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:58:54.378487Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:54.378494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:58:54.378501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:58:54.378505Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:58:54.378514Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:58:54.378520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:58:54.378526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:58:54.378530Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:58:54.378538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:58:54.378545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:58:54.378549Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:58:54.378571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:58:54.378577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:58:54.378582Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:58:54.378600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:58:54.378607Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:58:54.378612Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:58:54.378625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:58:54.378632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:58:54.378636Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:58:54.378643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:58:54.378650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:58:54.378657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:58:54.378661Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:58:54.378698Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=8; 2025-07-08T11:58:54.378708Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=6; 2025-07-08T11:58:54.378715Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2025-07-08T11:58:54.378726Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=7; 2025-07-08T11:58:54.378735Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:58:54.378746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:58:54.378752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:58:54.378757Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:58:54.378770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:58:54.378776Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;eve ... ge=start;iterator=ready_results:(count:1;records_count:31;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:56.913022Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-07-08T11:58:56.913031Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:238;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-07-08T11:58:56.913041Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:258;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-07-08T11:58:56.913070Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:969:2829];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-07-08T11:58:56.913081Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:278;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:56.913091Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:56.913101Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:56.913123Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T11:58:56.913131Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:56.913139Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:56.913143Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:970:2830] finished for tablet 9437184 2025-07-08T11:58:56.913185Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:425;event=scan_finish;compute_actor_id=[1:969:2829];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1751975936911557,"name":"_full_task","f":1751975936911557,"d_finished":0,"c":0,"l":1751975936913149,"d":1592},"events":[{"name":"bootstrap","f":1751975936911591,"d_finished":303,"c":1,"l":1751975936911894,"d":303},{"a":1751975936913121,"name":"ack","f":1751975936912994,"d_finished":110,"c":1,"l":1751975936913104,"d":138},{"a":1751975936913120,"name":"processing","f":1751975936912023,"d_finished":672,"c":10,"l":1751975936913104,"d":701},{"name":"ProduceResults","f":1751975936911767,"d_finished":291,"c":13,"l":1751975936913141,"d":291},{"a":1751975936913142,"name":"Finish","f":1751975936913142,"d_finished":0,"c":0,"l":1751975936913149,"d":7},{"name":"task_result","f":1751975936912026,"d_finished":548,"c":9,"l":1751975936912965,"d":548}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:56.913194Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:969:2829];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-07-08T11:58:56.913223Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:379;event=scan_finished;compute_actor_id=[1:969:2829];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1751975936911557,"name":"_full_task","f":1751975936911557,"d_finished":0,"c":0,"l":1751975936913198,"d":1641},"events":[{"name":"bootstrap","f":1751975936911591,"d_finished":303,"c":1,"l":1751975936911894,"d":303},{"a":1751975936913121,"name":"ack","f":1751975936912994,"d_finished":110,"c":1,"l":1751975936913104,"d":187},{"a":1751975936913120,"name":"processing","f":1751975936912023,"d_finished":672,"c":10,"l":1751975936913104,"d":750},{"name":"ProduceResults","f":1751975936911767,"d_finished":291,"c":13,"l":1751975936913141,"d":291},{"a":1751975936913142,"name":"Finish","f":1751975936913142,"d_finished":0,"c":0,"l":1751975936913198,"d":56},{"name":"task_result","f":1751975936912026,"d_finished":548,"c":9,"l":1751975936912965,"d":548}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:56.913233Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-07-08T11:58:56.911451Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7600;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7600;selected_rows=0; 2025-07-08T11:58:56.913238Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:192;event=scan_aborted;reason=unexpected on destructor; 2025-07-08T11:58:56.913264Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TColumnShardTestReadWrite::WriteReadNoCompression ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetPartitionDescribe [GOOD] Test command err: 2025-07-08T11:58:50.167680Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679325879776391:2075];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:50.167809Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T11:58:50.170991Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679325417603353:2072];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:50.171013Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000a89/r3tmp/tmpHMoMay/pdisk_1.dat 2025-07-08T11:58:50.202773Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T11:58:50.211816Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T11:58:50.234635Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9348, node 1 2025-07-08T11:58:50.258046Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/43nv/000a89/r3tmp/yandexIvVvh2.tmp 2025-07-08T11:58:50.258060Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/43nv/000a89/r3tmp/yandexIvVvh2.tmp 2025-07-08T11:58:50.262752Z INFO: TTestServer started on Port 4968 GrpcPort 9348 2025-07-08T11:58:50.267638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:50.267662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:50.269311Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4968 PQClient connected to localhost:9348 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-07-08T11:58:50.291270Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/43nv/000a89/r3tmp/yandexIvVvh2.tmp 2025-07-08T11:58:50.291354Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-07-08T11:58:50.302384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:50.302407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:50.303810Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-07-08T11:58:50.304072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T11:58:50.304117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-07-08T11:58:50.317621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-07-08T11:58:50.506089Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7524679325417603640:2267], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T11:58:50.506169Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjgyZDg5MzYtMTIwM2VlZWMtZTkwOGM2YWUtYmNkMmQ5YTM=, ActorId: [2:7524679325417603638:2266], ActorState: ExecuteState, TraceId: 01jzmyeakd897rt1m0ry649gqm, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T11:58:50.507727Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T11:58:50.537923Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7524679325879777320:2292], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T11:58:50.538432Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGU3NGRhZTktZDcwMjY5NzItYzQxY2Q0YTgtYTE0ZDhkNjA=, ActorId: [1:7524679325879777318:2291], ActorState: ExecuteState, TraceId: 01jzmyean44e6h6nnkmmam13y7, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T11:58:50.538570Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T11:58:50.539627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-07-08T11:58:50.609846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-07-08T11:58:50.635307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-07-08T11:58:50.673071Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720665. Ctx: { TraceId: 01jzmyeart4z8ghaf7rf25svft, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDI2YWVjMzgtOTVkNWRjNTEtZDQwZDgxNGEtZmY4YjgyNGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7524679325879777739:2974] 2025-07-08T11:58:51.168681Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:58:51.172764Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:58:55.167889Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7524679325879776391:2075];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:55.167924Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-07-08T11:58:55.171466Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7524679325417603353:2072];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:55.171499Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok CreateTopicNoLegacy: rt3.dc1--topic-x Create topic: /Root/PQ/rt3.dc1--topic-x AddTopic: rt3.dc1--topic-x ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = topic-x, dc = dc1 2025-07-08T11:58:55.704264Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2025-07-08T11:58:55.717577Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720678. Ctx: { TraceId: 01jzmyefps40q62j3v53s5h5by, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmVhNzdlYWYtZGE0OWRmNDUtMmY1ZjYxYWQtYTNiYzU5YTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:58:55.718147Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037898] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-07-08T11:58:55.718164Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898] doesn't have tx writes info 2025-07-08T11:58:55.718344Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037896] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-07-08T11:58:55.718357Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896] doesn't have tx writes info 2025-07-08T11:58:55.718515Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037893] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-07-08T11:58:55.718532Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893] doesn't have tx writes info 2025-07-08T11:58:55.718713Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][] pipe [1:7524679347354614863:3356] connected; active server actors: 1 2025-07-08T11:58:55.718837Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037899] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-07-08T11:58:55.718847Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037899] doesn't have tx writes info 2025-07-08T11:58:55.719074Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] updating configuration. Deleted partitions []. Added partitions [8, 12, 7, 13, 4, 5, 1 ... 
186224037897, Partition: 13, State: StateInit] bootstrapping 13 [2:7524679346892441297:2410] 2025-07-08T11:58:55.756249Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 1, State: StateInit] bootstrapping 1 [1:7524679347354615858:2492] 2025-07-08T11:58:55.756665Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 6, State: StateInit] bootstrapping 6 [1:7524679347354615860:2494] 2025-07-08T11:58:55.756692Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 8, State: StateInit] bootstrapping 8 [2:7524679346892441301:2412] 2025-07-08T11:58:55.756753Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] bootstrapping 12 [2:7524679346892441302:2413] 2025-07-08T11:58:55.756988Z node 1 :PERSQUEUE INFO: [rt3.dc1--topic-x:5:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-07-08T11:58:55.756999Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 5, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 5 generation 2 [1:7524679347354615859:2493] 2025-07-08T11:58:55.757166Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 11, State: StateInit] bootstrapping 11 [1:7524679347354615866:2497] 2025-07-08T11:58:55.757630Z node 1 :PERSQUEUE INFO: [rt3.dc1--topic-x:14:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-07-08T11:58:55.757640Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 14, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 14 generation 2 [1:7524679347354615864:2496] 2025-07-08T11:58:55.757754Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-07-08T11:58:55.757765Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 2 generation 2 [2:7524679346892441256:2402] 2025-07-08T11:58:55.758097Z node 1 :PERSQUEUE INFO: [rt3.dc1--topic-x:10:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-07-08T11:58:55.758113Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 10, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 10 generation 2 [1:7524679347354615861:2495] 2025-07-08T11:58:55.758161Z node 1 :PERSQUEUE INFO: [rt3.dc1--topic-x:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-07-08T11:58:55.758171Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 1 generation 2 [1:7524679347354615858:2492] 2025-07-08T11:58:55.758307Z node 1 :PERSQUEUE INFO: [rt3.dc1--topic-x:6:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-07-08T11:58:55.758317Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 6, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 6 generation 2 [1:7524679347354615860:2494] 2025-07-08T11:58:55.758340Z node 1 :PERSQUEUE INFO: [rt3.dc1--topic-x:4:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-07-08T11:58:55.758347Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037899, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 4 generation 2 [1:7524679347354615854:2491] 2025-07-08T11:58:55.758583Z node 1 :PERSQUEUE INFO: [rt3.dc1--topic-x:11:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-07-08T11:58:55.758592Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 11, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 11 generation 2 [1:7524679347354615866:2497] 2025-07-08T11:58:55.758583Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037896, NodeId 1, Generation 2 2025-07-08T11:58:55.758599Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037893, NodeId 1, Generation 2 2025-07-08T11:58:55.758602Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037899, NodeId 1, Generation 2 2025-07-08T11:58:55.758643Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-07-08T11:58:55.758653Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 0 generation 2 [2:7524679346892441270:2407] 2025-07-08T11:58:55.758675Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-07-08T11:58:55.758682Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 3 generation 2 [2:7524679346892441269:2406] 2025-07-08T11:58:55.758703Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:9:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-07-08T11:58:55.758713Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 9, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 9 generation 2 [2:7524679346892441255:2401] 2025-07-08T11:58:55.758729Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037898, NodeId 1, Generation 2 2025-07-08T11:58:55.758740Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037892, NodeId 2, Generation 2 2025-07-08T11:58:55.758802Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037895, NodeId 2, Generation 2 2025-07-08T11:58:55.758814Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:7:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-07-08T11:58:55.758816Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 7, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 7 generation 2 [2:7524679346892441288:2409] 2025-07-08T11:58:55.759172Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:13:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-07-08T11:58:55.759177Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:12:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-07-08T11:58:55.759181Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 12 generation 2 [2:7524679346892441302:2413] 2025-07-08T11:58:55.759185Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 13, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 13 generation 2 [2:7524679346892441297:2410] 2025-07-08T11:58:55.759247Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037897, NodeId 2, Generation 2 2025-07-08T11:58:55.759259Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:8:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-07-08T11:58:55.759262Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 8, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 8 generation 2 [2:7524679346892441301:2412] 2025-07-08T11:58:55.759296Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037894, NodeId 2, Generation 2 2025-07-08T11:58:56.737555Z node 1 :PQ_READ_PROXY DEBUG: new Describe partition request 2025-07-08T11:58:56.737595Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/PQ//rt3.dc1--topic-x" partition_id: 1 include_location: true 2025-07-08T11:58:56.737610Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[1:7524679351649583413:2523]: Bootstrap 2025-07-08T11:58:56.737747Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7524679351649583413:2523]: Request location 2025-07-08T11:58:56.737886Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7524679351649583415:2524] connected; active server actors: 1 2025-07-08T11:58:56.737979Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 2 2025-07-08T11:58:56.738038Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7524679351649583413:2523]: Got location 2025-07-08T11:58:56.738144Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7524679351649583415:2524] disconnected; active server actors: 1 2025-07-08T11:58:56.738158Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7524679351649583415:2524] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribePartitionResult] { partition { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } } } } 2025-07-08T11:58:56.738689Z node 1 :PQ_READ_PROXY DEBUG: new Describe partition request 2025-07-08T11:58:56.738728Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/PQ//rt3.dc1--topic-x" partition_id: 3 include_stats: true include_location: true 2025-07-08T11:58:56.738745Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[1:7524679351649583416:2525]: Bootstrap 2025-07-08T11:58:56.738882Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7524679351649583416:2525]: Request location 2025-07-08T11:58:56.739028Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7524679351649583419:2527] connected; active server actors: 1 2025-07-08T11:58:56.739123Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, 
Generation 2 2025-07-08T11:58:56.739171Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7524679351649583416:2525]: Got location 2025-07-08T11:58:56.739266Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7524679351649583419:2527] disconnected; active server actors: 1 2025-07-08T11:58:56.739276Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7524679351649583419:2527] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribePartitionResult] { partition { partition_id: 3 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1751975935 nanos: 755000000 } max_write_time_lag { } bytes_written { } partition_node_id: 2 } partition_location { node_id: 2 generation: 2 } } } } } 2025-07-08T11:58:56.739879Z node 1 :PQ_READ_PROXY DEBUG: new Describe partition request 2025-07-08T11:58:56.739905Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/PQ//bad-topic" include_stats: true include_location: true 2025-07-08T11:58:56.739920Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[1:7524679351649583421:2528]: Bootstrap Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } >> EvWrite::WriteInTransaction >> test.py::test[aggregate-group_by_ru_join--Results] [GOOD] >> test.py::test[aggregate-group_compact_sorted--Results] >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes >> Normalizers::EmptyTablesNormalizer [GOOD] >> TColumnShardTestReadWrite::WriteOverload+InStore [GOOD] >> TTopicApiDescribes::DescribeTopic [GOOD] >> test.py::test[window-win_multiaggr-default.txt-Results] [GOOD] >> test.py::test[window-win_multiaggr_tuple-default.txt-Results] >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteOverload+InStore [GOOD] Test command err: 2025-07-08T11:58:53.165801Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:58:53.168262Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:58:53.168304Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:58:53.168772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:58:53.168811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:53.168835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:53.168849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:53.168860Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:53.168873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:58:53.168885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:53.168895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:53.168905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:58:53.168916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.168926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:58:53.168938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:58:53.173767Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:58:53.173946Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:58:53.173956Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:58:53.173980Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:53.174034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:58:53.174046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:58:53.174050Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:58:53.174057Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:58:53.174064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:58:53.174070Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:58:53.174072Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:58:53.174087Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:53.174093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:58:53.174098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:58:53.174101Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:58:53.174107Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:58:53.174111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:58:53.174116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:58:53.174119Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:58:53.174124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:58:53.174129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:58:53.174132Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:58:53.174148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:58:53.174152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:58:53.174155Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:58:53.174170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:58:53.174175Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:58:53.174177Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:58:53.174186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:58:53.174191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.174193Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.174198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:58:53.174203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:58:53.174208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:58:53.174210Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:58:53.174240Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=7; 2025-07-08T11:58:53.174249Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=6; 2025-07-08T11:58:53.174254Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2025-07-08T11:58:53.174263Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=7; 2025-07-08T11:58:53.174269Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:58:53.174277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:58:53.174283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:58:53.174287Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:58:53.174296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:58:53.174300Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;eve ... SHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=21;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=1692;count=35; 2025-07-08T11:58:57.631792Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=21;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:65;memory_size=190;data_size=180;sum=3420;count=36;size_of_meta=112; 2025-07-08T11:58:57.631803Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=21;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:40;memory_size=262;data_size=252;sum=4716;count=18;size_of_portion=184; 2025-07-08T11:58:57.632027Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 1 2025-07-08T11:58:57.632053Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=21;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:156;event=add_by_insert_id;id=19;operation_id=18; 2025-07-08T11:58:57.642778Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 1 2025-07-08T11:58:57.644633Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:105;writing_size=6330728;event=data_write_finished;writing_id=ecf870a0-5bf211f0-82720bdc-29c9e0cc; 2025-07-08T11:58:57.644692Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=22;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=1786;count=37; 2025-07-08T11:58:57.644708Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=22;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:65;memory_size=190;data_size=180;sum=3610;count=38;size_of_meta=112; 2025-07-08T11:58:57.644719Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=22;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:40;memory_size=262;data_size=252;sum=4978;count=19;size_of_portion=184; 2025-07-08T11:58:57.644939Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 1 2025-07-08T11:58:57.644983Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=22;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:156;event=add_by_insert_id;id=20;operation_id=19; 2025-07-08T11:58:57.655783Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 1 2025-07-08T11:58:57.657911Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:105;writing_size=6330728;event=data_write_finished;writing_id=ed13583e-5bf211f0-9cecb382-9d6f0ad; 2025-07-08T11:58:57.657972Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=23;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=1880;count=39; 2025-07-08T11:58:57.657990Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=23;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:65;memory_size=190;data_size=180;sum=3800;count=40;size_of_meta=112; 2025-07-08T11:58:57.658001Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=23;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:40;memory_size=262;data_size=252;sum=5240;count=20;size_of_portion=184; 2025-07-08T11:58:57.658238Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 1 2025-07-08T11:58:57.658266Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=23;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:156;event=add_by_insert_id;id=21;operation_id=20; 2025-07-08T11:58:57.671132Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 1 2025-07-08T11:58:57.672553Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: 
tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:105;writing_size=6330728;event=data_write_finished;writing_id=ed2e4414-5bf211f0-a73fb3ba-c3dd4355; 2025-07-08T11:58:57.672602Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=24;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=1974;count=41; 2025-07-08T11:58:57.672612Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=24;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:65;memory_size=190;data_size=180;sum=3990;count=42;size_of_meta=112; 2025-07-08T11:58:57.672619Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=24;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:40;memory_size=262;data_size=252;sum=5502;count=21;size_of_portion=184; 2025-07-08T11:58:57.672804Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 1 2025-07-08T11:58:57.672825Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=24;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:156;event=add_by_insert_id;id=22;operation_id=21; 2025-07-08T11:58:57.683570Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 1 2025-07-08T11:58:57.686148Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:210;event=register_operation;operation_id=22;last=22; 2025-07-08T11:58:57.686171Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=write_queue.cpp:14;writing_size=6330728;operation_id=edb688ba-5bf211f0-9617642f-e16c50bb;in_flight=1;size_in_flight=6330728; 2025-07-08T11:58:57.852686Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:125:2157];write_id=22;path_id={internal: 9438184000001, ss: 1};fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=8246112;count=1;actions=__DEFAULT,;waiting=1;; 2025-07-08T11:58:57.872333Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: 
tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:105;writing_size=6330728;event=data_write_finished;writing_id=edb688ba-5bf211f0-9617642f-e16c50bb; 2025-07-08T11:58:57.872418Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=25;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=2068;count=43; 2025-07-08T11:58:57.872438Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=25;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:65;memory_size=190;data_size=180;sum=4180;count=44;size_of_meta=112; 2025-07-08T11:58:57.872448Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=25;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:40;memory_size=262;data_size=252;sum=5764;count=22;size_of_portion=184; 2025-07-08T11:58:57.872692Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 1 2025-07-08T11:58:57.872720Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=25;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:156;event=add_by_insert_id;id=23;operation_id=22; 2025-07-08T11:58:57.884175Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeTopic [GOOD] Test command err: 2025-07-08T11:58:49.979481Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679321285278910:2161];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:49.979589Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T11:58:50.024425Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000aaf/r3tmp/tmpg7UhC8/pdisk_1.dat 2025-07-08T11:58:50.028238Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T11:58:50.037217Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T11:58:50.065238Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28229, node 1 2025-07-08T11:58:50.082215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2025-07-08T11:58:50.082238Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:50.085291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:58:50.089166Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/43nv/000aaf/r3tmp/yandexCazpgy.tmp 2025-07-08T11:58:50.089182Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/43nv/000aaf/r3tmp/yandexCazpgy.tmp 2025-07-08T11:58:50.092064Z INFO: TTestServer started on Port 22714 GrpcPort 28229 2025-07-08T11:58:50.100176Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/43nv/000aaf/r3tmp/yandexCazpgy.tmp 2025-07-08T11:58:50.100298Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22714 PQClient connected to localhost:28229 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:58:50.127417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:50.131830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:50.131855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:50.133184Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-07-08T11:58:50.133527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T11:58:50.193762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-07-08T11:58:50.362689Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7524679325580247062:2293], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T11:58:50.363101Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzU0ZGJjMjMtMTZjNTQ0ZGUtMjYyM2JhNi03ZTVhYThiNQ==, ActorId: [1:7524679325580247059:2291], ActorState: ExecuteState, TraceId: 01jzmyeaetekxg51ps4bg0m9fa, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T11:58:50.363353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-07-08T11:58:50.363685Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7524679326889041133:2267], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T11:58:50.363783Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTliZjc2Y2EtNjEzYjhkZjctY2I1NjBiOTEtYmFkZjA0ZWM=, ActorId: [2:7524679326889041131:2266], ActorState: ExecuteState, TraceId: 01jzmyeafc3h2pnjm4njwh74m2, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T11:58:50.363917Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T11:58:50.363429Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T11:58:50.381053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-07-08T11:58:50.449185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-07-08T11:58:50.543019Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720665. Ctx: { TraceId: 01jzmyeamq07hmzza6814nzegz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjE3MmFlNmMtOGNiNDU0ZDUtNzRmMzQzYjEtNDVkZTlhNWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7524679325580247490:2991] 2025-07-08T11:58:50.977624Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:58:50.985275Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:58:54.978877Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7524679321285278910:2161];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:54.978923Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok CreateTopicNoLegacy: rt3.dc1--topic-x Create topic: /Root/PQ/rt3.dc1--topic-x AddTopic: rt3.dc1--topic-x ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = topic-x, dc = dc1 2025-07-08T11:58:56.569730Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2025-07-08T11:58:56.584022Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037893] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-07-08T11:58:56.584047Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893] doesn't have tx writes info 2025-07-08T11:58:56.584090Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037896] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-07-08T11:58:56.584098Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037896] doesn't have tx writes info 2025-07-08T11:58:56.584126Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037898] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-07-08T11:58:56.584132Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898] doesn't have tx writes info 2025-07-08T11:58:56.584163Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037899] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-07-08T11:58:56.584174Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037899] doesn't have tx writes info 2025-07-08T11:58:56.584652Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][] pipe [1:7524679351350052020:3433] connected; active server actors: 1 2025-07-08T11:58:56.584753Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] updating configuration. Deleted partitions []. Added partitions [8, 12, 7, 13, 4, 5, 10, 1, 6, 14, 11, 9, 2, 3, 0] 2025-07-08T11:58:56.585213Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720681. Ctx: { TraceId: 01jzmyeght4d13cgg0z5h4y4a9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzIxM2Q2MmMtNGZlMzM4MzYtOGE4YTFhNWYtOGE3NzRlMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T11:58:56.585027Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037900 2025-07-08T11:58:56.585479Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037893, NodeId 1, Generation 1 2025-07-08T11:58:56.585483Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037899, NodeId 1, Generation 1 20 ... etention_period { seconds: 64800 } partition_write_speed_bytes_per_second: 2097152 partition_write_burst_bytes: 2097152 attributes { key: "__max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } consumer_stats { min_partitions_last_read_time { seconds: 1751975936 nanos: 627000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } } } topic_stats { min_last_write_time { seconds: 1751975936 nanos: 638000000 } max_write_time_lag { } bytes_written { } } } } } Describe topic with location 2025-07-08T11:58:57.594375Z node 1 :PQ_READ_PROXY DEBUG: new Describe topic request 2025-07-08T11:58:57.594397Z node 1 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request path: "/Root/PQ//rt3.dc1--topic-x" include_location: true 2025-07-08T11:58:57.594425Z node 1 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/PQ//rt3.dc1--topic-x 2025-07-08T11:58:57.594553Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7524679355645020569:2559]: Request location Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeTopicResult] { self { name: "rt3.dc1--topic-x" owner: "root@builtin" type: TOPIC created_at { plan_step: 1751975936641 tx_id: 281474976720680 } } partitioning_settings { min_active_partitions: 15 max_active_partitions: 1 auto_partitioning_settings { strategy: AUTO_PARTITIONING_STRATEGY_DISABLED partition_write_speed { stabilization_window { seconds: 300 } up_utilization_percent: 80 down_utilization_percent: 20 } } } partitions { active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 2 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 3 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 4 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 5 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 6 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 7 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 8 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 9 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 10 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 11 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 12 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 13 active: true partition_location { node_id: 2 
generation: 2 } } partitions { partition_id: 14 active: true partition_location { node_id: 1 generation: 2 } } retention_period { seconds: 64800 } partition_write_speed_bytes_per_second: 2097152 partition_write_burst_bytes: 2097152 attributes { key: "__max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } } } } } Describe topic with no stats or location Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeTopicResult] { self { name: "rt3.dc1--topic-x" owner: "root@builtin" type: TOPIC created_at { plan_step: 1751975936641 tx_id: 281474976720680 } } partitioning_settings { min_active_partitions: 15 max_active_partitions: 1 auto_partitioning_settings { strategy: AUTO_PARTITIONING_STRATEGY_DISABLED partition_write_speed { stabilization_window { seconds: 300 } up_utilization_percent: 80 down_utilization_percent: 20 } } } partitions { active: true } partitions { partition_id: 1 active: true } partitions { partition_id: 2 active: true } partitions { partition_id: 3 active: true } partitions { partition_id: 4 active: true } partitions { partition_id: 5 active: true } partitions { partition_id: 6 active: true } partitions { partition_id: 7 active: true } partitions { partition_id: 8 active: true } partitions { partition_id: 9 active: true } partitions { partition_id: 10 active: true } partitions { partition_id: 11 active: true } partitions { partition_id: 12 active: true } partitions { partition_id: 13 active: true } partitions { partition_id: 14 active: true } retention_period { seconds: 64800 } partition_write_speed_bytes_per_second: 2097152 partition_write_burst_bytes: 2097152 attributes { key: "__max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } } } } } Describe bad topic 2025-07-08T11:58:57.599223Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7524679355645020571:2560] connected; active server actors: 1 2025-07-08T11:58:57.599407Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7524679355645020569:2559]: Got location 2025-07-08T11:58:57.599250Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 2, Generation 2 2025-07-08T11:58:57.600258Z node 1 :PQ_READ_PROXY DEBUG: new Describe topic request 2025-07-08T11:58:57.599253Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 2 2025-07-08T11:58:57.600270Z node 1 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request path: "/Root/PQ//rt3.dc1--topic-x" 2025-07-08T11:58:57.599256Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 2, NodeId 2, Generation 2 2025-07-08T11:58:57.600287Z node 1 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/PQ//rt3.dc1--topic-x 2025-07-08T11:58:57.599259Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 2 
2025-07-08T11:58:57.600966Z node 1 :PQ_READ_PROXY DEBUG: new Describe topic request 2025-07-08T11:58:57.600975Z node 1 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request path: "/Root/PQ//bad-topic" include_stats: true include_location: true 2025-07-08T11:58:57.599261Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037899, partitionId 4, NodeId 1, Generation 2 2025-07-08T11:58:57.601000Z node 1 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/PQ//bad-topic 2025-07-08T11:58:57.599263Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 1, Generation 2 2025-07-08T11:58:57.599266Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 6, NodeId 1, Generation 2 2025-07-08T11:58:57.599268Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 7, NodeId 2, Generation 2 2025-07-08T11:58:57.599272Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 8, NodeId 2, Generation 2 2025-07-08T11:58:57.599274Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 9, NodeId 2, Generation 2 2025-07-08T11:58:57.599276Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 10, NodeId 1, Generation 2 2025-07-08T11:58:57.599278Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 11, NodeId 1, Generation 2 2025-07-08T11:58:57.599280Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 12, NodeId 2, Generation 2 2025-07-08T11:58:57.599285Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 13, NodeId 2, Generation 2 2025-07-08T11:58:57.599287Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 14, NodeId 1, Generation 2 2025-07-08T11:58:57.599563Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7524679355645020571:2560] disconnected; active server actors: 1 2025-07-08T11:58:57.599565Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7524679355645020571:2560] disconnected no session Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 >> EvWrite::WriteInTransaction [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::EmptyTablesNormalizer [GOOD] Test command err: 2025-07-08T11:58:56.977796Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:124:2156];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:58:56.982038Z node 1 :TX_COLUMNSHARD 
INFO: tablet_id=9437184;self_id=[1:124:2156];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:58:56.982098Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:58:56.982846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=PortionsCleaner; 2025-07-08T11:58:56.982894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-07-08T11:58:56.982938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:56.982962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:56.982980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:56.982997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:56.983014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:58:56.983048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:56.983067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:56.983084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:58:56.983102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:56.983123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:58:56.983145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:58:56.988729Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:58:56.988765Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=PortionsCleaner; 2025-07-08T11:58:56.988773Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-07-08T11:58:56.988814Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=9; 2025-07-08T11:58:56.988821Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4; 2025-07-08T11:58:56.988828Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=5; 2025-07-08T11:58:56.988837Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=6; 2025-07-08T11:58:56.988872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=PortionsCleaner;id=NO_VALUE_OPTIONAL; 2025-07-08T11:58:56.988881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-07-08T11:58:56.988886Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-07-08T11:58:56.988895Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:56.988903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:58:56.988907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:58:56.988910Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-07-08T11:58:56.988916Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:58:56.988921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:58:56.988925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:58:56.988928Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-07-08T11:58:56.988937Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:56.988943Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:58:56.988973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:58:56.988978Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-07-08T11:58:56.988990Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:58:56.988995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:58:56.988999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:58:56.989002Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:58:56.989007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:58:56.989012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:58:56.989015Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:58:56.989021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:58:56.989029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:58:56.989032Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:58:56.989053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:58:56.989060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:58:56.989064Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:58:56.989073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:58:56.989077Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:58:56.989079Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:58:56.989083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:58:56.989088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:58:56.989090Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:58:56.989096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:58:56.989101Z node 1 :TX_COLUMNSH ... nager;fline=common_data.cpp:29;TablesLoadingTime=4; 2025-07-08T11:58:57.757455Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=common_data.cpp:29;SchemaPresetLoadingTime=6; 2025-07-08T11:58:57.757463Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2025-07-08T11:58:57.757489Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=tables_manager.cpp:195;event=load_preset;preset_id=1;snapshot={1751975937975:10};version=1; 2025-07-08T11:58:57.757495Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=tables_manager.cpp:199;event=index_schema;preset_id=1;snapshot={1751975937975:10};version=1; 2025-07-08T11:58:57.758015Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=530; 2025-07-08T11:58:57.758035Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tables_managerLoadingTime=612; 2025-07-08T11:58:57.758156Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:column_enginesLoadingTime=2; 2025-07-08T11:58:57.758177Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:countersLoadingTime=11; 2025-07-08T11:58:57.758196Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:countersLoadingTime=12; 2025-07-08T11:58:57.758205Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:sharding_infoLoadingTime=4; 2025-07-08T11:58:57.758210Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:sharding_infoLoadingTime=2; 2025-07-08T11:58:57.758214Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=1; 2025-07-08T11:58:57.758217Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=1; 2025-07-08T11:58:57.758220Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=58; 2025-07-08T11:58:57.758229Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=6; 2025-07-08T11:58:57.758239Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=7; 2025-07-08T11:58:57.758250Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=8; 2025-07-08T11:58:57.758257Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=4; 2025-07-08T11:58:57.758273Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=13; 2025-07-08T11:58:57.759042Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=764; 2025-07-08T11:58:57.759051Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=1; 2025-07-08T11:58:57.759055Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=0; 2025-07-08T11:58:57.759058Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-07-08T11:58:57.759067Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=5; 2025-07-08T11:58:57.759072Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=0; 2025-07-08T11:58:57.759079Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=4; 2025-07-08T11:58:57.759083Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=0; 2025-07-08T11:58:57.759090Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=3; 2025-07-08T11:58:57.759096Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=3; 2025-07-08T11:58:57.759100Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=2; 2025-07-08T11:58:57.759103Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=1776; 2025-07-08T11:58:57.759117Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 0 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-07-08T11:58:57.759134Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:358:2371];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SwitchToWork; 2025-07-08T11:58:57.759139Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:358:2371];process=SwitchToWork;fline=columnshard.cpp:76;event=initialize_shard;step=SignalTabletActive; 2025-07-08T11:58:57.759147Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:358:2371];process=SwitchToWork;fline=columnshard_impl.cpp:1327;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-07-08T11:58:57.759150Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:358:2371];process=SwitchToWork;fline=column_engine_logs.cpp:471;event=OnTieringModified;new_count_tierings=0; 2025-07-08T11:58:57.759155Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:58:57.759164Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=0; 2025-07-08T11:58:57.760455Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:58:57.760467Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:58:57.760471Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:58:57.760482Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; 2025-07-08T11:58:57.761007Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:358:2371];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:248;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-07-08T11:58:57.761021Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:358:2371];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:237;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-07-08T11:58:57.761025Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-07-08T11:58:57.761027Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-07-08T11:58:57.761031Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:358:2371];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:58:57.761038Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:358:2371];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=0; 2025-07-08T11:58:57.761043Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:358:2371];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:58:57.761047Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:358:2371];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:58:57.761050Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:358:2371];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:58:57.761057Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:358:2371];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; 2025-07-08T11:58:57.809806Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 111 scanId: 0 version: {1751975938020:111} readable: {1751975938020:max} at tablet 9437184 2025-07-08T11:58:57.809834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:358:2371];ev=NKikimr::TEvDataShard::TEvKqpScan;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=1;result=not_found; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=string;records=0;size=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=0; >> TColumnShardTestReadWrite::ReadAggregate [GOOD] >> TColumnShardTestReadWrite::ReadWithProgramNoProjection >> test.py::test[sampling-bind_expr-default.txt-Results] [GOOD] >> test.py::test[sampling-bind_join_right-default.txt-Results] [SKIPPED] >> test.py::test[sampling-join_left_sample-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::WriteInTransaction [GOOD] Test command err: 2025-07-08T11:58:57.895577Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:124:2156];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:58:57.899829Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:124:2156];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:58:57.899891Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:58:57.900675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:58:57.900737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:57.900774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:57.900796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:57.900815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:57.900833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:58:57.900852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:57.900872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:57.900889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:58:57.900907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:57.900926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:58:57.900964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:58:57.907096Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:58:57.907180Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:58:57.907190Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:58:57.907230Z 
node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:57.907292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:58:57.907309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:58:57.907314Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:58:57.907323Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:58:57.907332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:58:57.907340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:58:57.907344Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:58:57.907365Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:57.907372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:58:57.907379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:58:57.907383Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:58:57.907393Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:58:57.907399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:58:57.907406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:58:57.907410Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:58:57.907419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:58:57.907426Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:58:57.907433Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:58:57.907457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:58:57.907465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:58:57.907469Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:58:57.907488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:58:57.907497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:58:57.907501Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:58:57.907514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:58:57.907521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:58:57.907525Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:58:57.907533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:58:57.907540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:58:57.907548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:58:57.907552Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:58:57.907604Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=13; 2025-07-08T11:58:57.907614Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=6; 2025-07-08T11:58:57.907628Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=9; 2025-07-08T11:58:57.907640Z node 
1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=9; 2025-07-08T11:58:57.907651Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:58:57.907664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:58:57.907673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:58:57.907679Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:58:57.907692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:58:57.907698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;ev ... sult=0;count=0;finished=0; 2025-07-08T11:58:58.468787Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2299];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:207;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-07-08T11:58:58.468892Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2299];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:88;event=TEvTaskProcessedResult; 2025-07-08T11:58:58.468898Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2299];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=merge.cpp:75;event=DoApply;interval_idx=0; 2025-07-08T11:58:58.468903Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2299];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=1; 2025-07-08T11:58:58.468916Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2299];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=2048;merger=0;interval_id=1; 2025-07-08T11:58:58.468925Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2299];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-07-08T11:58:58.468933Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2299];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-07-08T11:58:58.468938Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2299];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=1;count=2048;finished=1; 2025-07-08T11:58:58.468943Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:282:2299];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:207;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-07-08T11:58:58.468998Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2299];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T11:58:58.469019Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2299];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:1;records_count:2048;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-07-08T11:58:58.469025Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2299];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-07-08T11:58:58.469034Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2299];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:238;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;);columns=2;rows=2048; 2025-07-08T11:58:58.469045Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2299];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:258;stage=data_format;batch_size=229376;num_rows=2048;batch_columns=key,field; 2025-07-08T11:58:58.469090Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2299];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:281:2298];bytes=229376;rows=2048;faults=0;finished=0;fault=0;schema=key: uint64 field: string; 2025-07-08T11:58:58.469103Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2299];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:278;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-07-08T11:58:58.469114Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2299];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-07-08T11:58:58.469122Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2299];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-07-08T11:58:58.469192Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2299];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T11:58:58.469200Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2299];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-07-08T11:58:58.469206Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2299];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-07-08T11:58:58.469212Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:282:2299] finished for tablet 9437184 2025-07-08T11:58:58.469278Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:282:2299];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:425;event=scan_finish;compute_actor_id=[1:281:2298];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.005}],"full":{"a":1751975938463828,"name":"_full_task","f":1751975938463828,"d_finished":0,"c":0,"l":1751975938469222,"d":5394},"events":[{"name":"bootstrap","f":1751975938463864,"d_finished":458,"c":1,"l":1751975938464322,"d":458},{"a":1751975938469190,"name":"ack","f":1751975938468992,"d_finished":133,"c":1,"l":1751975938469125,"d":165},{"a":1751975938469188,"name":"processing","f":1751975938464678,"d_finished":2717,"c":9,"l":1751975938469125,"d":2751},{"name":"ProduceResults","f":1751975938464136,"d_finished":325,"c":12,"l":1751975938469209,"d":325},{"a":1751975938469209,"name":"Finish","f":1751975938469209,"d_finished":0,"c":0,"l":1751975938469222,"d":13},{"name":"task_result","f":1751975938464684,"d_finished":2556,"c":8,"l":1751975938468966,"d":2556}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-07-08T11:58:58.469289Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2299];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:281:2298];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-07-08T11:58:58.469321Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:282:2299];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:379;event=scan_finished;compute_actor_id=[1:281:2298];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.005}],"full":{"a":1751975938463828,"name":"_full_task","f":1751975938463828,"d_finished":0,"c":0,"l":1751975938469295,"d":5467},"events":[{"name":"bootstrap","f":1751975938463864,"d_finished":458,"c":1,"l":1751975938464322,"d":458},{"a":1751975938469190,"name":"ack","f":1751975938468992,"d_finished":133,"c":1,"l":1751975938469125,"d":238},{"a":1751975938469188,"name":"processing","f":1751975938464678,"d_finished":2717,"c":9,"l":1751975938469125,"d":2824},{"name":"ProduceResults","f":1751975938464136,"d_finished":325,"c":12,"l":1751975938469209,"d":325},{"a":1751975938469209,"name":"Finish","f":1751975938469209,"d_finished":0,"c":0,"l":1751975938469295,"d":86},{"name":"task_result","f":1751975938464684,"d_finished":2556,"c":8,"l":1751975938468966,"d":2556}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-07-08T11:58:58.469334Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2299];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-07-08T11:58:58.463708Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=237240;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=237240;selected_rows=0; 2025-07-08T11:58:58.469339Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:282:2299];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:192;event=scan_aborted;reason=unexpected on destructor; 2025-07-08T11:58:58.469369Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:282:2299];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; >> TxUsage::WriteToTopic_Demo_11 [GOOD] >> test_generator.py::TestTpcdsGenerator::test_s1_parts [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:58:17.622883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:58:17.622906Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:17.622912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:58:17.622916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:58:17.622930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:58:17.622934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:58:17.622942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:17.622955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:58:17.623035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:58:17.636049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:58:17.636067Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:58:17.650078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:58:17.650144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:58:17.650171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:58:17.657672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:58:17.657745Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:58:17.657865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:17.658333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:58:17.661312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:17.661365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:58:17.661648Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:17.661659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:17.661677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:58:17.661684Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:17.661691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:58:17.661717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.663071Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:58:17.683530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:58:17.683620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.683678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:58:17.683744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:58:17.683758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.684301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:17.684324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:58:17.684362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.684370Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:58:17.684375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:58:17.684380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:58:17.684692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.684702Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:58:17.684707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:58:17.685461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.685474Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.685481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:17.685488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:58:17.686078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:58:17.686422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:58:17.686458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, 
TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:58:17.686653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:17.686674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:58:17.686683Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:17.686742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:58:17.686758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:17.686783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:58:17.686794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:58:17.687144Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:17.687151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:17.687192Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:17.687197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:58:17.687207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:17.687212Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:58:17.687223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:17.687227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:17.687232Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:17.687235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:17.687240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:58:17.687245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:17.687249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:58:17.687254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:58:17.687263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:58:17.687269Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:58:17.687273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:58:17.687644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:58:17.687658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0 2025-07-08T11:58:47.734370Z node 4 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 2, DataSize 70 2025-07-08T11:58:47.734406Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-07-08T11:58:47.734411Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0 2025-07-08T11:58:47.734414Z node 4 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 4: RowCount 0, DataSize 0, with borrowed parts 2025-07-08T11:58:47.744588Z node 4 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-07-08T11:58:51.322577Z node 4 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 70 rowCount 2 cpuUsage 0.001 2025-07-08T11:58:51.343357Z node 4 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0009 2025-07-08T11:58:51.379350Z node 4 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-07-08T11:58:51.379431Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 70 row count 2 2025-07-08T11:58:51.379451Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0 2025-07-08T11:58:51.379462Z node 4 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 2, DataSize 70 2025-07-08T11:58:51.379500Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-07-08T11:58:51.379508Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is 
olap=0 2025-07-08T11:58:51.379512Z node 4 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 4: RowCount 0, DataSize 0, with borrowed parts 2025-07-08T11:58:51.389715Z node 4 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-07-08T11:58:54.864334Z node 4 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 70 rowCount 2 cpuUsage 0.0009 2025-07-08T11:58:54.885167Z node 4 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0008 2025-07-08T11:58:54.926039Z node 4 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-07-08T11:58:54.926120Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 70 row count 2 2025-07-08T11:58:54.926143Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0 2025-07-08T11:58:54.926154Z node 4 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 2, DataSize 70 2025-07-08T11:58:54.926199Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-07-08T11:58:54.926207Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0 2025-07-08T11:58:54.926211Z node 4 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 4: RowCount 0, DataSize 0, with borrowed parts 2025-07-08T11:58:54.936394Z node 4 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-07-08T11:58:58.259213Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [4:574:2532], attempt# 1 2025-07-08T11:58:58.262262Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvReset: self# [4:573:2531] 2025-07-08T11:58:58.263457Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [4:574:2532], sender# [4:573:2531] 2025-07-08T11:58:58.263471Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [4:573:2531] 2025-07-08T11:58:58.263499Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [4:574:2532], sender# [4:573:2531], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 Checksum: } 2025-07-08T11:58:58.263554Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [4:574:2532], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: [6e3e0a41fdab8add833862f1bd2954c3,1d8dd09e584ce6a47582a31b591900e2,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:14956 Accept: */* Connection: Upgrade, HTTP2-Settings 
Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C8D99B98-91AD-4333-9C6E-EDE1006A7834 amz-sdk-request: attempt=1 content-length: 459 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv / uploadId=1 2025-07-08T11:58:58.264677Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [4:574:2532], result# 2025-07-08T11:58:58.264743Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [4:573:2531], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-07-08T11:58:58.267097Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 449 RawX2: 17179871602 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-07-08T11:58:58.267115Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2025-07-08T11:58:58.267137Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 449 RawX2: 17179871602 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-07-08T11:58:58.267147Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 449 RawX2: 17179871602 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-07-08T11:58:58.267157Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:58.267160Z node 4 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-07-08T11:58:58.267164Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-07-08T11:58:58.267171Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-07-08T11:58:58.267210Z node 4 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:58.267724Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-07-08T11:58:58.267801Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-07-08T11:58:58.267809Z node 4 :FLAT_TX_SCHEMESHARD INFO: 
[72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-07-08T11:58:58.267820Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-07-08T11:58:58.267825Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-07-08T11:58:58.267829Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-07-08T11:58:58.267833Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-07-08T11:58:58.267837Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2025-07-08T11:58:58.267863Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:125:2151] message: TxId: 281474976710759 2025-07-08T11:58:58.267870Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-07-08T11:58:58.267875Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-07-08T11:58:58.267879Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2025-07-08T11:58:58.267905Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-07-08T11:58:58.268320Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-07-08T11:58:58.268334Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2025-07-08T11:58:58.268799Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-07-08T11:58:58.268812Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:595:2550] TestWaitNotification: OK eventTxId 102 >> TxUsage::WriteToTopic_Demo_12 >> TIcNodeCache::GetNodesInfoTest [GOOD] >> TErasureTypeTest::TestAllSpeciesCrcWhole2of2 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadAggregate [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=8328;columns=19; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8328;columns=19; 2025-07-08T11:58:56.544785Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:58:56.547291Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:58:56.547332Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:58:56.547830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:58:56.547874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:56.547898Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:56.547911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:56.547922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:56.547934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:58:56.547947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:56.547957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:56.547967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:58:56.547978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:56.547989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:58:56.548002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:58:56.552152Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:58:56.552387Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:58:56.552404Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:58:56.552430Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:56.552499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:58:56.552512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:58:56.552518Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:58:56.552528Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:58:56.552536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:58:56.552543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:58:56.552548Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:58:56.552565Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:56.552573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:58:56.552580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:58:56.552585Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:58:56.552594Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:58:56.552601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:58:56.552608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:58:56.552613Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:58:56.552622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:58:56.552629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:58:56.552634Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:58:56.552657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:58:56.552664Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:58:56.552669Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:58:56.552692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:58:56.552699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:58:56.552704Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:58:56.552717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:58:56.552724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:58:56.552729Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:58:56.552752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:58:56.552760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:58:56.552767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:58:56.552771Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:58:56.552805Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=10; 2025-07-08T11:58:56.552814Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4; 2025-07-08T11:58:56.552822Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2025-07-08T11:58:56.552833Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=7; 2025-07-08T11:58:56.552843Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:58:56.552855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:58:56.552862Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:58:56.552867Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:58:56.552881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline ... ethod=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-07-08T11:58:58.400641Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:415:2432];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:207;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-07-08T11:58:58.400666Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:415:2432];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:88;event=TEvTaskProcessedResult; 2025-07-08T11:58:58.400671Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:415:2432];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=merge.cpp:75;event=DoApply;interval_idx=0; 2025-07-08T11:58:58.400676Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:415:2432];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=76; 2025-07-08T11:58:58.400688Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:415:2432];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=1;merger=0;interval_id=76; 2025-07-08T11:58:58.400693Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:415:2432];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-07-08T11:58:58.400702Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:415:2432];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-07-08T11:58:58.400707Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:415:2432];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=1;count=1;finished=1; 2025-07-08T11:58:58.400715Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:415:2432];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:207;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-07-08T11:58:58.400774Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:415:2432];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T11:58:58.400790Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:415:2432];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:1;records_count:1;schema=100: binary 101: binary 102: binary 103: 
uint64;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-07-08T11:58:58.400795Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:415:2432];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-07-08T11:58:58.400804Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:415:2432];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:238;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;);columns=4;rows=1; 2025-07-08T11:58:58.400813Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:415:2432];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:258;stage=data_format;batch_size=26;num_rows=1;batch_columns=100,101,102,103; 2025-07-08T11:58:58.400848Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:415:2432];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:374;event=send_data;compute_actor_id=[2:414:2431];bytes=26;rows=1;faults=0;finished=0;fault=0;schema=100: binary 101: binary 102: binary 103: uint64; 2025-07-08T11:58:58.400859Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:415:2432];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:278;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-07-08T11:58:58.400869Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:415:2432];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-07-08T11:58:58.400878Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:415:2432];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-07-08T11:58:58.436817Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:415:2432];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T11:58:58.436868Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:415:2432];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-07-08T11:58:58.436878Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:415:2432];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-07-08T11:58:58.436888Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: Scan [2:415:2432] finished for tablet 9437184 2025-07-08T11:58:58.437000Z node 2 :TX_COLUMNSHARD_SCAN INFO: SelfId=[2:415:2432];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:425;event=scan_finish;compute_actor_id=[2:414:2431];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_task_result"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.037}],"full":{"a":1751975938399248,"name":"_full_task","f":1751975938399248,"d_finished":0,"c":0,"l":1751975938436903,"d":37655},"events":[{"name":"bootstrap","f":1751975938399286,"d_finished":294,"c":1,"l":1751975938399580,"d":294},{"a":1751975938436799,"name":"ack","f":1751975938400771,"d_finished":35892,"c":1,"l":1751975938436663,"d":35996},{"a":1751975938436788,"name":"processing","f":1751975938399707,"d_finished":36622,"c":10,"l":1751975938436665,"d":36737},{"name":"ProduceResults","f":1751975938399453,"d_finished":36091,"c":13,"l":1751975938436882,"d":36091},{"a":1751975938436884,"name":"Finish","f":1751975938436884,"d_finished":0,"c":0,"l":1751975938436903,"d":19},{"name":"task_result","f":1751975938399710,"d_finished":709,"c":9,"l":1751975938400723,"d":709}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-07-08T11:58:58.437019Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:415:2432];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:374;event=send_data;compute_actor_id=[2:414:2431];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-07-08T11:58:58.437055Z node 2 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[2:415:2432];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:379;event=scan_finished;compute_actor_id=[2:414:2431];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_task_result"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.037}],"full":{"a":1751975938399248,"name":"_full_task","f":1751975938399248,"d_finished":0,"c":0,"l":1751975938437025,"d":37777},"events":[{"name":"bootstrap","f":1751975938399286,"d_finished":294,"c":1,"l":1751975938399580,"d":294},{"a":1751975938436799,"name":"ack","f":1751975938400771,"d_finished":35892,"c":1,"l":1751975938436663,"d":36118},{"a":1751975938436788,"name":"processing","f":1751975938399707,"d_finished":36622,"c":10,"l":1751975938436665,"d":36859},{"name":"ProduceResults","f":1751975938399453,"d_finished":36091,"c":13,"l":1751975938436882,"d":36091},{"a":1751975938436884,"name":"Finish","f":1751975938436884,"d_finished":0,"c":0,"l":1751975938437025,"d":141},{"name":"task_result","f":1751975938399710,"d_finished":709,"c":9,"l":1751975938400723,"d":709}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-07-08T11:58:58.437076Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:415:2432];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-07-08T11:58:58.399149Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=14056;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=14056;selected_rows=0; 2025-07-08T11:58:58.437082Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:415:2432];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:192;event=scan_aborted;reason=unexpected on destructor; 2025-07-08T11:58:58.437152Z node 2 :TX_COLUMNSHARD_SCAN INFO: SelfId=[2:415:2432];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;; >> EvWrite::WriteWithLock >> TColumnShardTestReadWrite::ReadWithProgramNoProjection [GOOD] >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp >> TColumnShardTestReadWrite::WriteExoticTypes |64.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpcdsGenerator::test_s1_parts [GOOD] |64.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestAllSpeciesCrcWhole2of2 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TIcNodeCache::GetNodesInfoTest [GOOD] Test command err: 2025-07-08T11:58:50.304570Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679323784613832:2148];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:50.304653Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T11:58:50.305868Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679322975834165:2072];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:50.305887Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000a97/r3tmp/tmp2izNTm/pdisk_1.dat 2025-07-08T11:58:50.341386Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T11:58:50.347402Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T11:58:50.369279Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61755, node 1 2025-07-08T11:58:50.397214Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/43nv/000a97/r3tmp/yandexRAaQ2T.tmp 2025-07-08T11:58:50.397230Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/43nv/000a97/r3tmp/yandexRAaQ2T.tmp 2025-07-08T11:58:50.401245Z INFO: TTestServer started on Port 9123 GrpcPort 61755 2025-07-08T11:58:50.405104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:50.405138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:50.406630Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9123 PQClient connected to localhost:61755 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
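The "WaitRootIsUp 'Root'" exchange above is the test harness polling the scheme service until the database root becomes visible. A rough equivalent of that probe from the Python SDK is sketched below; this is only an illustration (the harness itself uses the C++ TClient), and the endpoint simply reuses the GrpcPort printed in this log.

    import ydb

    # Sketch of a root-availability probe; endpoint/database are taken from this
    # log (GrpcPort 61755, database /Root) and are placeholders for a real run.
    driver = ydb.Driver(endpoint="grpc://localhost:61755", database="/Root")
    driver.wait(timeout=5)  # block until endpoint discovery succeeds

    entry = driver.scheme_client.describe_path("/Root")
    print(entry.name, entry.type)  # the root shows up as a directory entry
    driver.stop()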
2025-07-08T11:58:50.423966Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/43nv/000a97/r3tmp/yandexRAaQ2T.tmp 2025-07-08T11:58:50.424123Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T11:58:50.441745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:50.441765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:50.443148Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-07-08T11:58:50.443176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:58:50.443474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-07-08T11:58:50.471511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-07-08T11:58:50.692208Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7524679323784614677:2293], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T11:58:50.692651Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGY3MDA2MmItYjVkY2FlYmQtMTc2YmYyNDUtNjYxMzk2Nzc=, ActorId: [1:7524679323784614674:2291], ActorState: ExecuteState, TraceId: 01jzmyeasxf8gx9mdqkqqmwv8h, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T11:58:50.692808Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7524679322975834455:2267], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T11:58:50.692893Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDBmNmQzNWEtYTNlZjUwMTMtNDBmYzVlNjItMzIxMDY4NzQ=, ActorId: [2:7524679322975834453:2266], ActorState: ExecuteState, TraceId: 01jzmyeas87xzkf1qvr57cqw2q, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T11:58:50.693068Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T11:58:50.693079Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T11:58:50.695411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:58:50.761390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:58:50.784218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-07-08T11:58:50.825968Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jzmyeaxk6r39j7g5dv69rst9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzE1MTg5MWQtNDIxM2JkNTUtZmUwZTE3Y2YtMmYyNDgyY2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root === CheckClustersList. 
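The SCHEME_ERROR above is expected at this point in the test: the cluster tracker queries `/Root/PQ/Config/V2/Cluster` before the "=== Init DC" step has created and populated it. A sketch of that init step through the Python SDK follows; the column names and the UPSERT text come from the log itself, while the column types, the primary key and the SDK calls are assumptions, not the test's actual C++ code.

    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:61755", database="/Root")
    driver.wait(timeout=5)
    pool = ydb.SessionPool(driver)

    def init_dc(session):
        # Table layout is an assumption; only the column names are taken from the log.
        session.execute_scheme("""
            CREATE TABLE `/Root/PQ/Config/V2/Cluster` (
                name String,
                balancer String,
                local Bool,
                enabled Bool,
                weight Int64,
                PRIMARY KEY (name)
            );
        """)
        # Same statement the test prints in its "=== Init DC" step.
        session.transaction().execute("""
            UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight)
            VALUES ("dc1", "localhost", true, true, 1000),
                   ("dc2", "dc2.logbroker.yandex.net", false, true, 1000);
        """, commit_tx=True)

    pool.retry_operation_sync(init_dc)
    pool.stop()
    driver.stop()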
Subcribe to ClusterTracker from [1:7524679323784615109:2979] 2025-07-08T11:58:51.305020Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:58:51.307590Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:58:55.303217Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7524679323784613832:2148];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:55.303253Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-07-08T11:58:55.306290Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7524679322975834165:2072];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:55.306329Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgramNoProjection [GOOD] Test command err: 2025-07-08T11:58:58.812487Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:58:58.815256Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:58:58.815307Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:58:58.815852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:58:58.815897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:58.815923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:58.815938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:58.815950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:58.815963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:58:58.815977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:58.815989Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:58.816000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:58:58.816011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:58.816024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:58:58.816036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:58:58.828986Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:58:58.829249Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:58:58.829262Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:58:58.829288Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:58.829337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:58:58.829348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:58:58.829352Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:58:58.829359Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:58:58.829366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:58:58.829372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:58:58.829376Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:58:58.829395Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:58.829404Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:58:58.829412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:58:58.829416Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:58:58.829426Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:58:58.829432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:58:58.829437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:58:58.829440Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:58:58.829446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:58:58.829451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:58:58.829455Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:58:58.829473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:58:58.829478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:58:58.829481Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:58:58.829495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:58:58.829501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:58:58.829504Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:58:58.829513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:58:58.829519Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:58:58.829521Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:58:58.829526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:58:58.829531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:58:58.829537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:58:58.829540Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:58:58.829568Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=9; 2025-07-08T11:58:58.829576Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=5; 2025-07-08T11:58:58.829582Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2025-07-08T11:58:58.829591Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=6; 2025-07-08T11:58:58.829599Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:58:58.829607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:58:58.829614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:58:58.829619Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:58:58.829629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:58:58.829632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;eve ... 
d=100;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T11:58:59.427552Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:273:2290];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:1;records_count:100;schema=level: int32 timestamp: timestamp[us];);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-07-08T11:58:59.427558Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:273:2290];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-07-08T11:58:59.427569Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:273:2290];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:238;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;);columns=2;rows=100; 2025-07-08T11:58:59.427578Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:273:2290];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:258;stage=data_format;batch_size=1200;num_rows=100;batch_columns=level,timestamp; 2025-07-08T11:58:59.427634Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:273:2290];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:265:2282];bytes=1200;rows=100;faults=0;finished=0;fault=0;schema=level: int32 timestamp: timestamp[us]; 2025-07-08T11:58:59.427648Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:273:2290];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:278;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-07-08T11:58:59.427659Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:273:2290];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-07-08T11:58:59.427667Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:273:2290];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-07-08T11:58:59.427688Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:273:2290];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T11:58:59.427698Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:273:2290];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-07-08T11:58:59.427705Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:273:2290];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-07-08T11:58:59.427712Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:273:2290] finished for tablet 9437184 2025-07-08T11:58:59.427780Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:273:2290];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:425;event=scan_finish;compute_actor_id=[1:265:2282];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.003}],"full":{"a":1751975939424449,"name":"_full_task","f":1751975939424449,"d_finished":0,"c":0,"l":1751975939427722,"d":3273},"events":[{"name":"bootstrap","f":1751975939424500,"d_finished":658,"c":1,"l":1751975939425158,"d":658},{"a":1751975939427686,"name":"ack","f":1751975939427528,"d_finished":142,"c":1,"l":1751975939427670,"d":178},{"a":1751975939427684,"name":"processing","f":1751975939425481,"d_finished":1376,"c":10,"l":1751975939427670,"d":1414},{"name":"ProduceResults","f":1751975939424914,"d_finished":339,"c":13,"l":1751975939427708,"d":339},{"a":1751975939427709,"name":"Finish","f":1751975939427709,"d_finished":0,"c":0,"l":1751975939427722,"d":13},{"name":"task_result","f":1751975939425486,"d_finished":1209,"c":9,"l":1751975939427506,"d":1209}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-07-08T11:58:59.427790Z node 1 :TX_COLUMNSHARD_SCAN 
DEBUG: SelfId=[1:273:2290];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:265:2282];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-07-08T11:58:59.427825Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:273:2290];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:379;event=scan_finished;compute_actor_id=[1:265:2282];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.003}],"full":{"a":1751975939424449,"name":"_full_task","f":1751975939424449,"d_finished":0,"c":0,"l":1751975939427795,"d":3346},"events":[{"name":"bootstrap","f":1751975939424500,"d_finished":658,"c":1,"l":1751975939425158,"d":658},{"a":1751975939427686,"name":"ack","f":1751975939427528,"d_finished":142,"c":1,"l":1751975939427670,"d":251},{"a":1751975939427684,"name":"processing","f":1751975939425481,"d_finished":1376,"c":10,"l":1751975939427670,"d":1487},{"name":"ProduceResults","f":1751975939424914,"d_finished":339,"c":13,"l":1751975939427708,"d":339},{"a":1751975939427709,"name":"Finish","f":1751975939427709,"d_finished":0,"c":0,"l":1751975939427795,"d":86},{"name":"task_result","f":1751975939425486,"d_finished":1209,"c":9,"l":1751975939427506,"d":1209}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-07-08T11:58:59.427838Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:273:2290];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-07-08T11:58:59.424251Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2025-07-08T11:58:59.427844Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:273:2290];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:192;event=scan_aborted;reason=unexpected on destructor; 2025-07-08T11:58:59.427878Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:273:2290];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;; 2025-07-08T11:58:59.428002Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 9437184 2025-07-08T11:58:59.428052Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 100 scanId: 0 version: {1751975939818:100} readable: {1751975939818:max} at tablet 9437184 2025-07-08T11:58:59.428087Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 100 scanId: 0 at tablet 9437184 2025-07-08T11:58:59.428149Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=100;scan_id=0;gen=0;table=;snapshot={1751975939818:100};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 100 } Function { Id: 1 Arguments { Id: 1 } Arguments { Id: 9 } } } } Command { Filter { Predicate { Id: 100 } } } ; 2025-07-08T11:58:59.428162Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=100;scan_id=0;gen=0;table=;snapshot={1751975939818:100};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:96;parse_proto_program=Command { Assign { Column { Id: 100 } Function { Id: 1 Arguments { Id: 1 } Arguments { Id: 9 } } } } Command { Filter { Predicate { Id: 100 } } } ; 2025-07-08T11:58:59.428179Z node 1 :TX_COLUMNSHARD_SCAN WARN: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=100;scan_id=0;gen=0;table=;snapshot={1751975939818:100};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:14;event=TTxScan failed;problem=cannot parse program;details=Can't parse SsaProgram: program has no projections; >> TColumnShardTestReadWrite::WriteOverload-InStore ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes [GOOD] Test command err: 2025-07-08T11:58:58.093750Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:58:58.098341Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:58:58.098392Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:58:58.099148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:58:58.099200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:58.099235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:58.099257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:58.099274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:58.099296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:58:58.099314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:58.099332Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:58.099348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:58:58.099365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:58.099382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:58:58.099400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:58:58.105663Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:58:58.105866Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:58:58.105879Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:58:58.105909Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:58.105954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:58:58.105971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:58:58.105977Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:58:58.105988Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:58:58.105998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:58:58.106005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:58:58.106010Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:58:58.106034Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:58.106043Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:58:58.106051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:58:58.106056Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:58:58.106066Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:58:58.106074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:58:58.106082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:58:58.106086Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:58:58.106095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:58:58.106103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:58:58.106107Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:58:58.106131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:58:58.106139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:58:58.106144Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:58:58.106165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:58:58.106173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:58:58.106177Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:58:58.106190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:58:58.106197Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:58:58.106202Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:58:58.106209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:58:58.106217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:58:58.106224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:58:58.106228Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:58:58.106263Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=9; 2025-07-08T11:58:58.106272Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=5; 2025-07-08T11:58:58.106281Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=5; 2025-07-08T11:58:58.106292Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=6; 2025-07-08T11:58:58.106301Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:58:58.106313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:58:58.106320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:58:58.106325Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:58:58.106338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:58:58.106344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;eve ... 
ge=start;iterator=ready_results:(count:1;records_count:31;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: binary json_payload: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:59.654556Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-07-08T11:58:59.654567Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:238;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-07-08T11:58:59.654577Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:258;stage=data_format;batch_size=2759;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-07-08T11:58:59.654611Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:391:2407];bytes=2759;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: binary json_payload: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary; 2025-07-08T11:58:59.654625Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:278;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:59.654635Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:59.654653Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:59.654679Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T11:58:59.654688Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:59.654697Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:59.654702Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:392:2408] finished for tablet 9437184 2025-07-08T11:58:59.654748Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:425;event=scan_finish;compute_actor_id=[1:391:2407];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1751975939653418,"name":"_full_task","f":1751975939653418,"d_finished":0,"c":0,"l":1751975939654709,"d":1291},"events":[{"name":"bootstrap","f":1751975939653443,"d_finished":258,"c":1,"l":1751975939653701,"d":258},{"a":1751975939654677,"name":"ack","f":1751975939654526,"d_finished":132,"c":1,"l":1751975939654658,"d":164},{"a":1751975939654675,"name":"processing","f":1751975939653794,"d_finished":565,"c":10,"l":1751975939654658,"d":599},{"name":"ProduceResults","f":1751975939653594,"d_finished":282,"c":13,"l":1751975939654700,"d":282},{"a":1751975939654700,"name":"Finish","f":1751975939654700,"d_finished":0,"c":0,"l":1751975939654709,"d":9},{"name":"task_result","f":1751975939653796,"d_finished":419,"c":9,"l":1751975939654500,"d":419}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:59.654757Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:391:2407];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-07-08T11:58:59.654791Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:379;event=scan_finished;compute_actor_id=[1:391:2407];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1751975939653418,"name":"_full_task","f":1751975939653418,"d_finished":0,"c":0,"l":1751975939654762,"d":1344},"events":[{"name":"bootstrap","f":1751975939653443,"d_finished":258,"c":1,"l":1751975939653701,"d":258},{"a":1751975939654677,"name":"ack","f":1751975939654526,"d_finished":132,"c":1,"l":1751975939654658,"d":217},{"a":1751975939654675,"name":"processing","f":1751975939653794,"d_finished":565,"c":10,"l":1751975939654658,"d":652},{"name":"ProduceResults","f":1751975939653594,"d_finished":282,"c":13,"l":1751975939654700,"d":282},{"a":1751975939654700,"name":"Finish","f":1751975939654700,"d_finished":0,"c":0,"l":1751975939654762,"d":62},{"name":"task_result","f":1751975939653796,"d_finished":419,"c":9,"l":1751975939654500,"d":419}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:58:59.654801Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-07-08T11:58:59.653339Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7928;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7928;selected_rows=0; 2025-07-08T11:58:59.654807Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:192;event=scan_aborted;reason=unexpected on destructor; 2025-07-08T11:58:59.654836Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot >> TColumnShardTestReadWrite::WriteReadNoCompression [GOOD] >> TSequenceReboots::CopyTableWithSequence [GOOD] >> Normalizers::PortionsNormalizer >> EvWrite::WriteWithLock [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadNoCompression [GOOD] 
Test command err: 2025-07-08T11:58:57.526412Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:58:57.529775Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:58:57.529819Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:58:57.530475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:58:57.530531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:57.530565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:57.530586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:57.530603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:57.530619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:58:57.530641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:57.530659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:57.530674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:58:57.530692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:57.530714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:58:57.530732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:58:57.536903Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:58:57.537008Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:58:57.537022Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:58:57.537064Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:57.537109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:58:57.537123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:58:57.537129Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:58:57.537140Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:58:57.537150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:58:57.537158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:58:57.537163Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:58:57.537182Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:57.537191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:58:57.537199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:58:57.537204Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:58:57.537215Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:58:57.537222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:58:57.537231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:58:57.537235Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:58:57.537245Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:58:57.537253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:58:57.537260Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:58:57.537288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:58:57.537295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:58:57.537300Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:58:57.537322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:58:57.537331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:58:57.537335Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:58:57.537350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:58:57.537359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:58:57.537363Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:58:57.537373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:58:57.537381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:58:57.537388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:58:57.537393Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:58:57.537436Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=12; 2025-07-08T11:58:57.537450Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=9; 2025-07-08T11:58:57.537459Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=5; 2025-07-08T11:58:57.537473Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=9; 2025-07-08T11:58:57.537484Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:58:57.537497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:58:57.537507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:58:57.537513Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:58:57.537528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:58:57.537534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;ev ... tage=start;iterator=ready_results:(count:1;records_count:31;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:59:00.235417Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-07-08T11:59:00.235424Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:238;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-07-08T11:59:00.235431Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:258;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-07-08T11:59:00.235458Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:969:2829];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-07-08T11:59:00.235466Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:278;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:59:00.235473Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:59:00.235480Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:59:00.235498Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T11:59:00.235504Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:59:00.235509Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:59:00.235512Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:970:2830] finished for tablet 9437184 2025-07-08T11:59:00.235546Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:425;event=scan_finish;compute_actor_id=[1:969:2829];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1751975940233993,"name":"_full_task","f":1751975940233993,"d_finished":0,"c":0,"l":1751975940235516,"d":1523},"events":[{"name":"bootstrap","f":1751975940234022,"d_finished":306,"c":1,"l":1751975940234328,"d":306},{"a":1751975940235497,"name":"ack","f":1751975940235393,"d_finished":89,"c":1,"l":1751975940235482,"d":108},{"a":1751975940235496,"name":"processing","f":1751975940234481,"d_finished":665,"c":10,"l":1751975940235482,"d":685},{"name":"ProduceResults","f":1751975940234201,"d_finished":286,"c":13,"l":1751975940235510,"d":286},{"a":1751975940235511,"name":"Finish","f":1751975940235511,"d_finished":0,"c":0,"l":1751975940235516,"d":5},{"name":"task_result","f":1751975940234483,"d_finished":561,"c":9,"l":1751975940235359,"d":561}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:59:00.235553Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:969:2829];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-07-08T11:59:00.235574Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:379;event=scan_finished;compute_actor_id=[1:969:2829];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1751975940233993,"name":"_full_task","f":1751975940233993,"d_finished":0,"c":0,"l":1751975940235556,"d":1563},"events":[{"name":"bootstrap","f":1751975940234022,"d_finished":306,"c":1,"l":1751975940234328,"d":306},{"a":1751975940235497,"name":"ack","f":1751975940235393,"d_finished":89,"c":1,"l":1751975940235482,"d":148},{"a":1751975940235496,"name":"processing","f":1751975940234481,"d_finished":665,"c":10,"l":1751975940235482,"d":725},{"name":"ProduceResults","f":1751975940234201,"d_finished":286,"c":13,"l":1751975940235510,"d":286},{"a":1751975940235511,"name":"Finish","f":1751975940235511,"d_finished":0,"c":0,"l":1751975940235556,"d":45},{"name":"task_result","f":1751975940234483,"d_finished":561,"c":9,"l":1751975940235359,"d":561}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:59:00.235582Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-07-08T11:59:00.233896Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2025-07-08T11:59:00.235586Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:192;event=scan_aborted;reason=unexpected on destructor; 2025-07-08T11:59:00.235607Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::WriteWithLock [GOOD] Test command err: 2025-07-08T11:58:59.706728Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:124:2156];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:58:59.709188Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:124:2156];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:58:59.709228Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:58:59.709680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:58:59.709721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:59.709742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:59.709756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:59.709768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:59.709778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:58:59.709790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:59.709801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:59.709812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:58:59.709822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:59.709833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:58:59.709847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:58:59.715129Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:58:59.715191Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:58:59.715198Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:58:59.715231Z 
node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:59.715262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:58:59.715273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:58:59.715276Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:58:59.715282Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:58:59.715288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:58:59.715293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:58:59.715296Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:58:59.715307Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:59.715312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:58:59.715317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:58:59.715319Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:58:59.715325Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:58:59.715329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:58:59.715333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:58:59.715336Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:58:59.715341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:58:59.715346Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:58:59.715348Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:58:59.715363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:58:59.715367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:58:59.715370Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:58:59.715383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:58:59.715389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:58:59.715393Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:58:59.715405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:58:59.715412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:58:59.715415Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:58:59.715422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:58:59.715429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:58:59.715434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:58:59.715438Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:58:59.715473Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=9; 2025-07-08T11:58:59.715479Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4; 2025-07-08T11:58:59.715486Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2025-07-08T11:58:59.715494Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=5; 2025-07-08T11:58:59.715501Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:58:59.715509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:58:59.715515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:58:59.715519Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:58:59.715527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:58:59.715531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;eve ... roduce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-07-08T11:59:00.303839Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2314];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:238;stage=ready result;iterator=ready_results:(count:1;records_count:2048;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;);columns=2;rows=2048; 2025-07-08T11:59:00.303849Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2314];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:258;stage=data_format;batch_size=229376;num_rows=2048;batch_columns=key,field; 2025-07-08T11:59:00.303900Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2314];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:296:2313];bytes=229376;rows=2048;faults=0;finished=0;fault=0;schema=key: uint64 field: string; 2025-07-08T11:59:00.303914Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2314];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:278;stage=finished;iterator=ready_results:(count:1;records_count:2048;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-07-08T11:59:00.303924Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2314];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:1;records_count:2048;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 
2025-07-08T11:59:00.303931Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2314];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-07-08T11:59:00.303935Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2314];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:207;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-07-08T11:59:00.304021Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2314];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T11:59:00.304031Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2314];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:1;records_count:2048;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-07-08T11:59:00.304036Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2314];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-07-08T11:59:00.304043Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2314];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:238;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;);columns=2;rows=2048; 2025-07-08T11:59:00.304049Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2314];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:258;stage=data_format;batch_size=229376;num_rows=2048;batch_columns=key,field; 2025-07-08T11:59:00.304071Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2314];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:296:2313];bytes=229376;rows=2048;faults=0;finished=0;fault=0;schema=key: uint64 field: string; 2025-07-08T11:59:00.304080Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2314];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:278;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-07-08T11:59:00.304094Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2314];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 
2025-07-08T11:59:00.304103Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2314];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-07-08T11:59:00.304171Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2314];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T11:59:00.304179Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2314];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-07-08T11:59:00.304186Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2314];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-07-08T11:59:00.304192Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:297:2314] finished for tablet 9437184 2025-07-08T11:59:00.304260Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:297:2314];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:425;event=scan_finish;compute_actor_id=[1:296:2313];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.009}],"full":{"a":1751975940294592,"name":"_full_task","f":1751975940294592,"d_finished":0,"c":0,"l":1751975940304201,"d":9609},"events":[{"name":"bootstrap","f":1751975940294628,"d_finished":591,"c":1,"l":1751975940295219,"d":591},{"a":1751975940304169,"name":"ack","f":1751975940303806,"d_finished":222,"c":2,"l":1751975940304109,"d":254},{"a":1751975940304167,"name":"processing","f":1751975940295563,"d_finished":5292,"c":18,"l":1751975940304109,"d":5326},{"name":"ProduceResults","f":1751975940294933,"d_finished":589,"c":22,"l":1751975940304188,"d":589},{"a":1751975940304189,"name":"Finish","f":1751975940304189,"d_finished":0,"c":0,"l":1751975940304201,"d":12},{"name":"task_result","f":1751975940295569,"d_finished":5024,"c":16,"l":1751975940303778,"d":5024}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-07-08T11:59:00.304271Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:297:2314];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:296:2313];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-07-08T11:59:00.304305Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:297:2314];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:379;event=scan_finished;compute_actor_id=[1:296:2313];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.009}],"full":{"a":1751975940294592,"name":"_full_task","f":1751975940294592,"d_finished":0,"c":0,"l":1751975940304277,"d":9685},"events":[{"name":"bootstrap","f":1751975940294628,"d_finished":591,"c":1,"l":1751975940295219,"d":591},{"a":1751975940304169,"name":"ack","f":1751975940303806,"d_finished":222,"c":2,"l":1751975940304109,"d":330},{"a":1751975940304167,"name":"processing","f":1751975940295563,"d_finished":5292,"c":18,"l":1751975940304109,"d":5402},{"name":"ProduceResults","f":1751975940294933,"d_finished":589,"c":22,"l":1751975940304188,"d":589},{"a":1751975940304189,"name":"Finish","f":1751975940304189,"d_finished":0,"c":0,"l":1751975940304277,"d":88},{"name":"task_result","f":1751975940295569,"d_finished":5024,"c":16,"l":1751975940303778,"d":5024}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-07-08T11:59:00.304321Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2314];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-07-08T11:59:00.294476Z;index_granules=0;index_portions=2;index_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=474480;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=474480;selected_rows=0; 2025-07-08T11:59:00.304327Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:297:2314];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:192;event=scan_aborted;reason=unexpected on destructor; 2025-07-08T11:59:00.304430Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:297:2314];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; >> TColumnShardTestReadWrite::ReadGroupBy |64.6%| [TA] $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime >> TColumnShardTestReadWrite::CompactionGCFailingBs >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 >> TColumnShardTestReadWrite::WriteExoticTypes [GOOD] >> Normalizers::PortionsNormalizer [GOOD] >> test.py::test[aggregate-group_compact_sorted--Results] [GOOD] >> test.py::test[aggregate-group_compact_sorted_distinct_complex--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteExoticTypes [GOOD] Test command err: 2025-07-08T11:58:59.799294Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:58:59.803421Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:58:59.803482Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:58:59.804143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:58:59.804199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:59.804230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:59.804263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:59.804279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:59.804297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:58:59.804314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:59.804342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:59.804370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:58:59.804391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:59.804408Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:58:59.804432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:58:59.809893Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:58:59.810069Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:58:59.810081Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:58:59.810108Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:59.810147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:58:59.810160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:58:59.810166Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:58:59.810175Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:58:59.810183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:58:59.810190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:58:59.810194Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:58:59.810215Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:59.810222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:58:59.810229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:58:59.810233Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:58:59.810242Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:58:59.810248Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:58:59.810255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:58:59.810259Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:58:59.810267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:58:59.810273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:58:59.810278Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:58:59.810301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:58:59.810307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:58:59.810311Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:58:59.810331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:58:59.810338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:58:59.810342Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:58:59.810354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:58:59.810361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:58:59.810364Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:58:59.810372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:58:59.810379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:58:59.810386Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:58:59.810390Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:58:59.810424Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=10; 2025-07-08T11:58:59.810433Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=5; 2025-07-08T11:58:59.810441Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2025-07-08T11:58:59.810453Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=9; 2025-07-08T11:58:59.810463Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:58:59.810473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:58:59.810480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:58:59.810485Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:58:59.810498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:58:59.810503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;ev ... 
[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"19,19,19,19,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"20,20,20,20,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"21,21,21,21,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"22,22,22,22,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"23,23,23,23,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"24,24,24,24,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"25,25,25,25,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"26,26,26,26,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"27,27,27,27,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"28,28,28,28,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"29,29,29,29,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"30,30,30,30,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"31,31,31,31,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"32,32,32,32,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"33,33,33,33,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"34,34,34,34,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"35,35,35,35,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"36,36,36,36,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"37,37,37,37,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"38,38,38,38,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"39,39,39,39,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"40,40,40,40,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"41,41,41,41,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"42,42,42,42,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1}
,"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"43,43,43,43,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"44,44,44,44,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"45,45,45,45,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"46,46,46,46,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"47,47,47,47,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"48,48,48,48,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"49,49,49,49,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"50,50,50,50,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"51,51,51,51,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"52,52,52,52,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"53,53,53,53,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"54,54,54,54,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"55,55,55,55,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"56,56,56,56,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"57,57,57,57,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"58,58,58,58,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"59,59,59,59,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"60,60,60,60,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"61,61,61,61,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"62,62,62,62,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"63,63,63,63,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"64,64,64,64,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"65,65,65,65,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"66,66,66,66,"}},{"i":{"txs":[],"starts":[{"inc
":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"67,67,67,67,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"68,68,68,68,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"69,69,69,69,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"70,70,70,70,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"71,71,71,71,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"72,72,72,72,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"73,73,73,73,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"74,74,74,74,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"75,75,75,75,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"76,76,76,76,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"77,77,77,77,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"78,78,78,78,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"79,79,79,79,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"80,80,80,80,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"81,81,81,81,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"82,82,82,82,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"83,83,83,83,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"84,84,84,84,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"85,85,85,85,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"86,86,86,86,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"87,87,87,87,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"88,88,88,88,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"89,89,89,89,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"90,90,90,90,"}},{"i":{"tx
s":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"91,91,91,91,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"92,92,92,92,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"93,93,93,93,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"94,94,94,94,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"95,95,95,95,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"96,96,96,96,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"97,97,97,97,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"98,98,98,98,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"99,99,99,99,"}}]}; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::PortionsNormalizer [GOOD] Test command err: 2025-07-08T11:59:00.555385Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:124:2156];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:59:00.559692Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:124:2156];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:59:00.559760Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:59:00.560484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=EmptyPortionsCleaner; 2025-07-08T11:59:00.560538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=LeakedBlobsNormalizer; 2025-07-08T11:59:00.560556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-07-08T11:59:00.560599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:59:00.560621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:59:00.560638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:59:00.560658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
2025-07-08T11:59:00.560675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:59:00.560693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:59:00.560711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:59:00.560729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:59:00.560750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:59:00.560772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:59:00.560789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:59:00.572684Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:59:00.572772Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=13;current_normalizer=CLASS_NAME=EmptyPortionsCleaner; 2025-07-08T11:59:00.572785Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-07-08T11:59:00.572830Z node 1 :TX_COLUMNSHARD CRIT: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_empty.cpp:323;tasks_for_remove=0;distribution=; 2025-07-08T11:59:00.572868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=EmptyPortionsCleaner;id=NO_VALUE_OPTIONAL; 2025-07-08T11:59:00.572879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=LeakedBlobsNormalizer;id=NO_VALUE_OPTIONAL; 2025-07-08T11:59:00.572884Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-07-08T11:59:00.572925Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=11; 2025-07-08T11:59:00.572932Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4; 2025-07-08T11:59:00.572939Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=6; 
2025-07-08T11:59:00.572969Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=27; 2025-07-08T11:59:00.572985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=LeakedBlobsNormalizer;id=NO_VALUE_OPTIONAL; 2025-07-08T11:59:00.572995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-07-08T11:59:00.573002Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-07-08T11:59:00.573016Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:00.573025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:59:00.573030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:59:00.573032Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-07-08T11:59:00.573038Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:59:00.573043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:59:00.573048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:59:00.573051Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-07-08T11:59:00.573062Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:00.573068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:59:00.573073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:59:00.573075Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-07-08T11:59:00.573082Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:59:00.573086Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:59:00.573091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:59:00.573093Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:59:00.573099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:59:00.573103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:59:00.573106Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:59:00.573111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:59:00.573117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:59:00.573119Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:59:00.573132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:59:00.573137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:59:00.573139Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:59:00.573147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:59:00.573152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:59:00.5 ... .373818Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:360:2373];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:248;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-07-08T11:59:01.373839Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:360:2373];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:237;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-07-08T11:59:01.373844Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-07-08T11:59:01.373848Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-07-08T11:59:01.373854Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:360:2373];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:01.373867Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:360:2373];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=0; 2025-07-08T11:59:01.373876Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:360:2373];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:01.373883Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:360:2373];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:01.373888Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:360:2373];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:01.373902Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:360:2373];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=1.000000s; 2025-07-08T11:59:01.373909Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:360:2373];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; 2025-07-08T11:59:01.422730Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 111 scanId: 0 version: {1751975941599:111} readable: {1751975941599:max} at tablet 9437184 2025-07-08T11:59:01.422796Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 111 scanId: 0 at tablet 9437184 2025-07-08T11:59:01.422874Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:360:2373];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={1751975941599:111};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } } } ; 2025-07-08T11:59:01.422887Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:360:2373];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={1751975941599:111};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:96;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } } } ; 2025-07-08T11:59:01.423067Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:360:2373];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={1751975941599:111};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:44;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[{"from":8}]},{"owner_id":0,"inputs":[{"from":2},{"from":4},{"from":6}]},{"owner_id":8,"inputs":[]},{"owner_id":2,"inputs":[{"from":7}]},{"owner_id":4,"inputs":[{"from":7}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"8":{"p":{"p":{"data":[{"name":"key1","id":1},{"name":"key2","id":2},{"name":"field","id":3}]},"o":"0","t":"ReserveMemory"},"w":0,"id":8},"2":{"p":{"i":"1","p":{"address":{"name":"key1","id":1}},"o":"1","t":"AssembleOriginalData"},"w":11,"id":2},"6":{"p":{"i":"3","p":{"address":{"name":"field","id":3}},"o":"3","t":"AssembleOriginalData"},"w":11,"id":6},"7":{"p":{"i":"0","p":{"data":[{"name":"key1","id":1},{"name":"key2","id":2},{"name":"field","id":3}]},"o":"1,2,3","t":"FetchOriginalData"},"w":6,"id":7},"4":{"p":{"i":"2","p":{"address":{"name":"key2","id":2}},"o":"2","t":"AssembleOriginalData"},"w":11,"id":4},"0":{"p":{"i":"1,2,3","t":"Projection"},"w":33,"id":0}}}; 2025-07-08T11:59:01.423106Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:360:2373];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={1751975941599:111};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:142;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-07-08T11:59:01.423229Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:360:2373];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={1751975941599:111};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:169;event=TTxScan started;actor_id=[1:430:2434];trace_detailed=; 2025-07-08T11:59:01.423333Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:84;ff_first=(column_ids=1,2,3;column_names=field,key1,key2;);; 2025-07-08T11:59:01.423357Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; 2025-07-08T11:59:01.423426Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2434];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T11:59:01.423442Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2434];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:01.423450Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2434];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:01.423458Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:430:2434] finished for tablet 9437184 2025-07-08T11:59:01.423508Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:430:2434];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:425;event=scan_finish;compute_actor_id=[1:428:2433];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0}],"full":{"a":1751975941423212,"name":"_full_task","f":1751975941423212,"d_finished":0,"c":0,"l":1751975941423466,"d":254},"events":[{"name":"bootstrap","f":1751975941423254,"d_finished":132,"c":1,"l":1751975941423386,"d":132},{"a":1751975941423420,"name":"ack","f":1751975941423420,"d_finished":0,"c":0,"l":1751975941423466,"d":46},{"a":1751975941423416,"name":"processing","f":1751975941423416,"d_finished":0,"c":0,"l":1751975941423466,"d":50},{"name":"ProduceResults","f":1751975941423381,"d_finished":26,"c":2,"l":1751975941423453,"d":26},{"a":1751975941423454,"name":"Finish","f":1751975941423454,"d_finished":0,"c":0,"l":1751975941423466,"d":12}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:01.423520Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2434];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:428:2433];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-07-08T11:59:01.423550Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:430:2434];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:379;event=scan_finished;compute_actor_id=[1:428:2433];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0}],"full":{"a":1751975941423212,"name":"_full_task","f":1751975941423212,"d_finished":0,"c":0,"l":1751975941423525,"d":313},"events":[{"name":"bootstrap","f":1751975941423254,"d_finished":132,"c":1,"l":1751975941423386,"d":132},{"a":1751975941423420,"name":"ack","f":1751975941423420,"d_finished":0,"c":0,"l":1751975941423525,"d":105},{"a":1751975941423416,"name":"processing","f":1751975941423416,"d_finished":0,"c":0,"l":1751975941423525,"d":109},{"name":"ProduceResults","f":1751975941423381,"d_finished":26,"c":2,"l":1751975941423453,"d":26},{"a":1751975941423454,"name":"Finish","f":1751975941423454,"d_finished":0,"c":0,"l":1751975941423525,"d":71}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:01.423567Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:430:2434];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-07-08T11:59:01.423098Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-07-08T11:59:01.423573Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2434];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:192;event=scan_aborted;reason=unexpected on destructor; 2025-07-08T11:59:01.423582Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:430:2434];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=string;records=0;size=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=0; >> Normalizers::CleanUnusedTablesNormalizer >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot >> test.py::test[window-win_multiaggr_tuple-default.txt-Results] [GOOD] >> TKesusTest::TestAcquireSemaphoreRebootTimeout [GOOD] >> TKesusTest::TestAcquireSemaphoreViaDecrease >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot >> TKesusTest::TestAcquireSemaphoreViaDecrease [GOOD] >> Normalizers::CleanUnusedTablesNormalizer [GOOD] >> TColumnShardTestReadWrite::ReadSomePrograms >> LocalPartition::Restarts [GOOD] >> LocalPartition::DescribeBadPartition >> test.py::test[sampling-join_left_sample-default.txt-Results] [GOOD] >> test.py::test[sampling-yql-14664_deps-default.txt-Results] >> TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout [GOOD] >> TKesusTest::TestAcquireSemaphore >> TExternalTableTestReboots::SimpleDropExternalTableWithReboots [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaDecrease [GOOD] Test command err: 2025-07-08T11:58:23.308149Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:23.308185Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:23.313994Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:23.314031Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:23.329684Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:23.329843Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:133:2159], cookie=10677318817839159798, session=0, seqNo=0) 2025-07-08T11:58:23.329885Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-07-08T11:58:23.353638Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:133:2159], cookie=10677318817839159798, session=1) 
2025-07-08T11:58:23.353747Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:133:2159], cookie=9295356314045912835, session=0, seqNo=0) 2025-07-08T11:58:23.353782Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-07-08T11:58:23.365577Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:133:2159], cookie=9295356314045912835, session=2) 2025-07-08T11:58:23.365694Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[1:144:2168], cookie=8117776106174155445, name="Sem1", limit=1) 2025-07-08T11:58:23.365743Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-07-08T11:58:23.377225Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[1:144:2168], cookie=8117776106174155445) 2025-07-08T11:58:23.377318Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:133:2159], cookie=111, session=1, semaphore="Sem1" count=1) 2025-07-08T11:58:23.377363Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-07-08T11:58:23.377398Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:133:2159], cookie=222, session=2, semaphore="Sem1" count=1) 2025-07-08T11:58:23.392472Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:133:2159], cookie=111) 2025-07-08T11:58:23.392502Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:133:2159], cookie=222) 2025-07-08T11:58:23.392632Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:152:2176], cookie=2695752185061730658, name="Sem1") 2025-07-08T11:58:23.392662Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:152:2176], cookie=2695752185061730658) 2025-07-08T11:58:23.392718Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:155:2179], cookie=12656600440687183779, name="Sem1") 2025-07-08T11:58:23.392725Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:155:2179], cookie=12656600440687183779) 2025-07-08T11:58:23.817045Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:23.833477Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:24.213101Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:24.225843Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:24.636865Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:24.648181Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:24.994835Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:25.007745Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:25.373281Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:25.393301Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:25.793355Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:25.813647Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:26.181116Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Execute 2025-07-08T11:58:26.200861Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:26.589747Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:26.605962Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:26.993101Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:27.009670Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:27.433096Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:27.449253Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:27.846180Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:27.861515Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:28.222674Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:28.238370Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:28.610000Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:28.620968Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:28.985136Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:29.001620Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:29.415241Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:29.429317Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:29.806987Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:29.820940Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:30.228755Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:30.249067Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:30.665118Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:30.677895Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:31.048559Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:31.061274Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:31.479071Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:31.493806Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:31.885119Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:31.905274Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:32.280850Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:32.293540Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:32.684806Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:32.697381Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:33.069121Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:33.089256Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:33.483841Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Execute 2025-07-08T11:58:33.496513Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:33.873137Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:33.885785Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:34.277299Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:34.289749Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:34.677474Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:34.697780Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:35.109140Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:35.121749Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:35.528711Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:35.545387Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:35.921098Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:35.941316Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:36.333079Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:36.349461Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:36.695519Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:36.706596Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:37.085107Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:37.098029Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:37.472693Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:37.483833Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:37.849385Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:37.861932Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:38.240243Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:38.256126Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:38.607492Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:38.619108Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:39.001139Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:39.017382Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:39.481139Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:39.497762Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:39.907745Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:39.921303Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:40.291541Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:40.317279Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:40.737147Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Execute 2025-07-08T11:58:40.749741Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:41.145165Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:41.160191Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:41.577088Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:41.5957 ... : [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:54.335585Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:54.682239Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:54.697338Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:55.046560Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:55.057582Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:55.424640Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:55.435541Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:55.783450Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:55.796748Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:56.143241Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:56.154075Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:56.504295Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:56.515233Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:56.870871Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:56.882036Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:57.229752Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:57.240681Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:57.586681Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:57.597593Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:57.935405Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:57.946365Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:58.292154Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:58.303146Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:58.652896Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:58.664432Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:59.068148Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:59.081284Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:59.432115Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:59.443084Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:59.789795Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:59.801070Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 
2025-07-08T11:59:00.149882Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:00.161813Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:00.508709Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:00.519681Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:00.871023Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:00.881935Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:01.239444Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:01.251909Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:01.598149Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:01.609919Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:01.956917Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:01.967832Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:02.315533Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:02.326445Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:02.652937Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-07-08T11:59:02.652980Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-07-08T11:59:02.652991Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2025-07-08T11:59:02.663956Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-07-08T11:59:02.674403Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:412:2412], cookie=3163529486327993467, name="Sem1") 2025-07-08T11:59:02.674443Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:412:2412], cookie=3163529486327993467) 2025-07-08T11:59:02.926666Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:59:02.926696Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:59:02.931112Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:59:02.931164Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:59:02.952498Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:59:02.952641Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:133:2159], cookie=983876505446643095, session=0, seqNo=0) 2025-07-08T11:59:02.952683Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-07-08T11:59:02.963542Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:133:2159], cookie=983876505446643095, session=1) 2025-07-08T11:59:02.963631Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:133:2159], cookie=12821465282923657763, session=0, seqNo=0) 2025-07-08T11:59:02.963665Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-07-08T11:59:02.974513Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:133:2159], cookie=12821465282923657763, session=2) 2025-07-08T11:59:02.974622Z node 5 :KESUS_TABLET 
DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:133:2159], cookie=6692304329604271473, session=0, seqNo=0) 2025-07-08T11:59:02.974659Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 3 2025-07-08T11:59:02.985415Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:133:2159], cookie=6692304329604271473, session=3) 2025-07-08T11:59:02.985553Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:146:2170], cookie=5080285956235323902, name="Sem1", limit=3) 2025-07-08T11:59:02.985587Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-07-08T11:59:02.996284Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:146:2170], cookie=5080285956235323902) 2025-07-08T11:59:02.996366Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:133:2159], cookie=111, session=1, semaphore="Sem1" count=2) 2025-07-08T11:59:02.996407Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-07-08T11:59:02.996447Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:133:2159], cookie=222, session=2, semaphore="Sem1" count=1) 2025-07-08T11:59:02.996457Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-07-08T11:59:02.996472Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:133:2159], cookie=333, session=3, semaphore="Sem1" count=1) 2025-07-08T11:59:03.007243Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:133:2159], cookie=111) 2025-07-08T11:59:03.007272Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:133:2159], cookie=222) 2025-07-08T11:59:03.007278Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:133:2159], cookie=333) 2025-07-08T11:59:03.007391Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:154:2178], cookie=9789712812484098396, name="Sem1") 2025-07-08T11:59:03.007408Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:154:2178], cookie=9789712812484098396) 2025-07-08T11:59:03.007460Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:157:2181], cookie=6580651506644439761, name="Sem1") 2025-07-08T11:59:03.007467Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:157:2181], cookie=6580651506644439761) 2025-07-08T11:59:03.007498Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:133:2159], cookie=444, session=1, semaphore="Sem1" count=1) 2025-07-08T11:59:03.007533Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2025-07-08T11:59:03.018272Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:133:2159], cookie=444) 2025-07-08T11:59:03.018412Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:162:2186], cookie=12573248936916206123, name="Sem1") 2025-07-08T11:59:03.018428Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:162:2186], cookie=12573248936916206123) 2025-07-08T11:59:03.018467Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSemaphoreDescribe::Execute (sender=[5:165:2189], cookie=15011316999415350893, name="Sem1") 2025-07-08T11:59:03.018471Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:165:2189], cookie=15011316999415350893) 2025-07-08T11:59:03.020499Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:59:03.020518Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:59:03.020553Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:59:03.020661Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:59:03.062818Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:59:03.062865Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-07-08T11:59:03.062873Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-07-08T11:59:03.062878Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2025-07-08T11:59:03.062975Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:205:2219], cookie=3624986640336643646, name="Sem1") 2025-07-08T11:59:03.062994Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:205:2219], cookie=3624986640336643646) 2025-07-08T11:59:03.063112Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:214:2227], cookie=6592956704714423119, name="Sem1") 2025-07-08T11:59:03.063119Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:214:2227], cookie=6592956704714423119) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::CleanUnusedTablesNormalizer [GOOD] Test command err: 2025-07-08T11:59:02.146045Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:124:2156];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:59:02.150215Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:124:2156];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:59:02.150276Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:59:02.151019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanIndexColumns; 2025-07-08T11:59:02.151067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-07-08T11:59:02.151126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:59:02.151151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:59:02.151170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 
2025-07-08T11:59:02.151188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:59:02.151204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:59:02.151225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:59:02.151242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:59:02.151259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:59:02.151277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:59:02.151298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:59:02.151320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:59:02.157638Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:59:02.157726Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=CleanIndexColumns; 2025-07-08T11:59:02.157740Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-07-08T11:59:02.157844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanIndexColumns;id=NO_VALUE_OPTIONAL; 2025-07-08T11:59:02.157862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-07-08T11:59:02.157869Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-07-08T11:59:02.157887Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:02.157900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:59:02.157909Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:59:02.157913Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-07-08T11:59:02.157923Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:59:02.157932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:59:02.157939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:59:02.157943Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-07-08T11:59:02.157961Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:02.157969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:59:02.157976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:59:02.157980Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-07-08T11:59:02.157992Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:59:02.158000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:59:02.158007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:59:02.158011Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:59:02.158020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:59:02.158028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:59:02.158033Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:59:02.158042Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:59:02.158049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:59:02.158053Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:59:02.158077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:59:02.158085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:59:02.158089Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:59:02.158103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:59:02.158111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:59:02.158115Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:59:02.158123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:59:02.158131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:59:02.158135Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:59:02.158143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:59:02.158151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:59:02.158156Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:59:02.158171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:59:02.158177Z node 1 :TX_COLUMNSHARD WARN: t ... 
r=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-07-08T11:59:03.063938Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:510:2514];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:207;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-07-08T11:59:03.064204Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:510:2514];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:88;event=TEvTaskProcessedResult; 2025-07-08T11:59:03.064212Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:510:2514];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=merge.cpp:75;event=DoApply;interval_idx=0; 2025-07-08T11:59:03.064219Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:510:2514];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2; 2025-07-08T11:59:03.064233Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:510:2514];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=20048;merger=0;interval_id=2; 2025-07-08T11:59:03.064242Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:510:2514];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-07-08T11:59:03.064252Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:510:2514];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:03.064257Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:510:2514];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=1;count=20048;finished=1; 2025-07-08T11:59:03.064263Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:510:2514];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:207;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-07-08T11:59:03.064307Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:510:2514];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T11:59:03.064327Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:510:2514];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:1;records_count:20048;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:03.064333Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:510:2514];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-07-08T11:59:03.064344Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:510:2514];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:238;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;);columns=3;rows=20048; 2025-07-08T11:59:03.064355Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:510:2514];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:258;stage=data_format;batch_size=2405760;num_rows=20048;batch_columns=key1,key2,field; 2025-07-08T11:59:03.064388Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:510:2514];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:505:2510];bytes=2405760;rows=20048;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2025-07-08T11:59:03.064402Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:510:2514];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:278;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:03.064414Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:510:2514];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:03.064422Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:510:2514];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:03.064962Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:510:2514];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T11:59:03.064991Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:510:2514];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:03.065004Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:510:2514];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:03.065014Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:510:2514] finished for tablet 9437184 2025-07-08T11:59:03.065104Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:510:2514];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:425;event=scan_finish;compute_actor_id=[1:505:2510];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["l_task_result"],"t":0.042},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.043}],"full":{"a":1751975943021300,"name":"_full_task","f":1751975943021300,"d_finished":0,"c":0,"l":1751975943065031,"d":43731},"events":[{"name":"bootstrap","f":1751975943021333,"d_finished":317,"c":1,"l":1751975943021650,"d":317},{"a":1751975943064937,"name":"ack","f":1751975943064302,"d_finished":122,"c":1,"l":1751975943064424,"d":216},{"a":1751975943064930,"name":"processing","f":1751975943022023,"d_finished":29909,"c":16,"l":1751975943064425,"d":30010},{"name":"ProduceResults","f":1751975943021507,"d_finished":437,"c":19,"l":1751975943065009,"d":437},{"a":1751975943065010,"name":"Finish","f":1751975943065010,"d_finished":0,"c":0,"l":1751975943065031,"d":21},{"name":"task_result","f":1751975943022026,"d_finished":29745,"c":15,"l":1751975943064272,"d":29745}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:03.065121Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:510:2514];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:505:2510];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-07-08T11:59:03.065167Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:510:2514];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:379;event=scan_finished;compute_actor_id=[1:505:2510];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["l_task_result"],"t":0.042},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.043}],"full":{"a":1751975943021300,"name":"_full_task","f":1751975943021300,"d_finished":0,"c":0,"l":1751975943065128,"d":43828},"events":[{"name":"bootstrap","f":1751975943021333,"d_finished":317,"c":1,"l":1751975943021650,"d":317},{"a":1751975943064937,"name":"ack","f":1751975943064302,"d_finished":122,"c":1,"l":1751975943064424,"d":313},{"a":1751975943064930,"name":"processing","f":1751975943022023,"d_finished":29909,"c":16,"l":1751975943064425,"d":30107},{"name":"ProduceResults","f":1751975943021507,"d_finished":437,"c":19,"l":1751975943065009,"d":437},{"a":1751975943065010,"name":"Finish","f":1751975943065010,"d_finished":0,"c":0,"l":1751975943065128,"d":118},{"name":"task_result","f":1751975943022026,"d_finished":29745,"c":15,"l":1751975943064272,"d":29745}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:03.065188Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:510:2514];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-07-08T11:59:03.021209Z;index_granules=0;index_portions=2;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=2776560;inserted_portions_bytes=0;committed_portions_bytes=2488696;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5265256;selected_rows=0; 2025-07-08T11:59:03.065197Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:510:2514];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:192;event=scan_aborted;reason=unexpected on destructor; 2025-07-08T11:59:03.065255Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:510:2514];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; >> TKesusTest::TestAcquireSemaphore [GOOD] >> TColumnShardTestReadWrite::ReadSomePrograms [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::SimpleDropExternalTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] IGNORE Leader for TabletID 
72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:127:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:132:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:137:2058] recipient: [1:111:2143] 2025-07-08T11:58:50.552535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:58:50.552558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:50.552564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:58:50.552570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:58:50.552576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:58:50.552580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:58:50.552590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:50.552607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:58:50.552695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:58:50.566285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T11:58:50.566311Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:177:2058] recipient: [1:15:2062] 2025-07-08T11:58:50.570548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:58:50.570603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:58:50.570633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:58:50.572709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:58:50.572844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:58:50.572961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:50.573013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:58:50.573454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:50.573490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:58:50.573720Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:50.573729Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-07-08T11:58:50.573759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:58:50.573767Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:50.573773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:58:50.573810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-07-08T11:58:50.575175Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T11:58:50.594514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:58:50.594581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:50.594634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:58:50.594673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:58:50.594680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:50.595349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:50.595371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:58:50.595407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:50.595414Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:58:50.595418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:58:50.595422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:58:50.595699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:50.595707Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:58:50.595710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:58:50.595908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:50.595914Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:50.595918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:50.595923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:58:50.596306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:58:50.596554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:58:50.596585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:58:50.596729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:50.596746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:58:50.596751Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:50.596810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:58:50.596815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:50.596836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:58:50.596845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:58:50.597162Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:50.597169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:50.597205Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:50.597208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:58:50.597266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-07-08T11:58:50.597271Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:58:50.597280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:50.597283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:50.597287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:50.597289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:50.597291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:58:50.597295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TO ... ontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1004 at step: 5000005 2025-07-08T11:59:03.549956Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:03.549971Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1004 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 214748366958 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:03.549976Z node 50 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalTable TPropose opId# 1004:0 HandleReply TEvOperationPlan: step# 5000005 2025-07-08T11:59:03.550008Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-07-08T11:59:03.550025Z node 50 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 128 -> 240 2025-07-08T11:59:03.550050Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T11:59:03.550058Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-07-08T11:59:03.550063Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T11:59:03.550192Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-07-08T11:59:03.550496Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 FAKE_COORDINATOR: Erasing txId 1004 2025-07-08T11:59:03.550930Z node 50 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:03.550938Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:03.550963Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-07-08T11:59:03.550978Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T11:59:03.550996Z node 50 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:03.551000Z node 50 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [50:205:2207], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2025-07-08T11:59:03.551004Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [50:205:2207], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2025-07-08T11:59:03.551007Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [50:205:2207], at schemeshard: 72057594046678944, txId: 1004, path id: 3 2025-07-08T11:59:03.551066Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-07-08T11:59:03.551074Z node 50 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2025-07-08T11:59:03.551086Z node 50 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2025-07-08T11:59:03.551089Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-07-08T11:59:03.551094Z node 50 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2025-07-08T11:59:03.551097Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-07-08T11:59:03.551101Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2025-07-08T11:59:03.551113Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-07-08T11:59:03.551117Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2025-07-08T11:59:03.551121Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2025-07-08T11:59:03.551133Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-07-08T11:59:03.551136Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-07-08T11:59:03.551141Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 3, subscribers: 0 2025-07-08T11:59:03.551148Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-07-08T11:59:03.551151Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-07-08T11:59:03.551155Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-07-08T11:59:03.551209Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T11:59:03.551218Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T11:59:03.551222Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1004 2025-07-08T11:59:03.551226Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-07-08T11:59:03.551229Z 
node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-07-08T11:59:03.551302Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T11:59:03.551309Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-07-08T11:59:03.551317Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-07-08T11:59:03.551391Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T11:59:03.551400Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T11:59:03.551403Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2025-07-08T11:59:03.551407Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-07-08T11:59:03.551411Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T11:59:03.551664Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T11:59:03.551679Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T11:59:03.551683Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2025-07-08T11:59:03.551686Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-07-08T11:59:03.551690Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T11:59:03.551704Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2025-07-08T11:59:03.553115Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-07-08T11:59:03.553173Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-07-08T11:59:03.553199Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-07-08T11:59:03.553468Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 
2025-07-08T11:59:03.553536Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-07-08T11:59:03.553546Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-07-08T11:59:03.553614Z node 50 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-07-08T11:59:03.553632Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-07-08T11:59:03.553637Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [50:387:2378] TestWaitNotification: OK eventTxId 1004 2025-07-08T11:59:03.553753Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:59:03.553785Z node 50 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 42us result status StatusPathDoesNotExist 2025-07-08T11:59:03.553817Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphore [GOOD] Test command err: 2025-07-08T11:58:23.265968Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:23.266004Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:23.270025Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:23.270066Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:23.293306Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:23.293499Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:131:2157], cookie=10357877314396535251, session=0, seqNo=0) 2025-07-08T11:58:23.293549Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-07-08T11:58:23.325244Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:131:2157], cookie=10357877314396535251, session=1) 2025-07-08T11:58:23.325353Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:131:2157], cookie=4935887143515790349, session=0, seqNo=0) 2025-07-08T11:58:23.325386Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-07-08T11:58:23.341672Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSessionAttach::Complete (sender=[1:131:2157], cookie=4935887143515790349, session=2) 2025-07-08T11:58:23.341845Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:131:2157], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-07-08T11:58:23.341920Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-07-08T11:58:23.341934Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-07-08T11:58:23.341975Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:131:2157], cookie=222, session=2, semaphore="Lock2" count=1) 2025-07-08T11:58:23.341985Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-07-08T11:58:23.341992Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 2 2025-07-08T11:58:23.342006Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:131:2157], cookie=333, session=1, semaphore="Lock2" count=1) 2025-07-08T11:58:23.342013Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 1 2025-07-08T11:58:23.357674Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:131:2157], cookie=111) 2025-07-08T11:58:23.357701Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:131:2157], cookie=222) 2025-07-08T11:58:23.357706Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:131:2157], cookie=333) 2025-07-08T11:58:23.357834Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:147:2171], cookie=14974384932796118734, name="Lock1") 2025-07-08T11:58:23.357863Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:147:2171], cookie=14974384932796118734) 2025-07-08T11:58:23.357912Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:150:2174], cookie=15343036051468299844, name="Lock2") 2025-07-08T11:58:23.357918Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:150:2174], cookie=15343036051468299844) 2025-07-08T11:58:23.367121Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:23.367146Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:23.367200Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:23.367302Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:23.414459Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:23.414508Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-07-08T11:58:23.414516Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 2 2025-07-08T11:58:23.414520Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 1 2025-07-08T11:58:23.414649Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:190:2204], cookie=2107748474891453225, name="Lock1") 2025-07-08T11:58:23.414671Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete 
(sender=[1:190:2204], cookie=2107748474891453225) 2025-07-08T11:58:23.414779Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:198:2211], cookie=17453629588644817100, name="Lock2") 2025-07-08T11:58:23.414787Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:198:2211], cookie=17453629588644817100) 2025-07-08T11:58:23.857090Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:23.873261Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:24.291131Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:24.303258Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:24.668024Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:24.679015Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:25.073161Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:25.084570Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:25.477119Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:25.490157Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:25.895821Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:25.907852Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:26.267415Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:26.278395Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:26.652584Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:26.668796Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:27.037525Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:27.049283Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:27.465816Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:27.478504Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:27.856317Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:27.869680Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:28.248011Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:28.261431Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:28.627920Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:28.642607Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:28.995536Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:29.013511Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:29.461103Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:29.477253Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:29.881094Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:29.917362Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 
2025-07-08T11:58:30.329160Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:30.346327Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:30.747350Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:30.761572Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:31.124285Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:31.135893Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:31.562343Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:31.573311Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:31.964704Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:31.977281Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:32.393082Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:32.409250Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:32.806606Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:32.823069Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:33.216770Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:33.237316Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:33.610404Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:33.625342Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:34.021181Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:34.042815Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:34.451836Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:34.463285Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:34.825129Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:34.845321Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:35.243277Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:35.258361Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:35.685216Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:366:2367], cookie=10918493148386100659, name="Lock1") 2025-07-08T11:58:35.685260Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:366:2367], cookie=10918493148386100659) 2025-07-08T11:58:35.685333Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:369:2370], cookie=9050263299375027294, name="Lock2") 2025-07-08T11:58:35.685339Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:369:2370], cookie=9050263299375027294) 2025-07-08T11:58:35.733130Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:35.749279Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:36.162045Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:36.181505Z node 1 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:36.548587Z node 1 :KESUS_TABLET DEBUG: [7205 ... 08T11:58:56.046206Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:56.412979Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:56.423874Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:56.782462Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:56.797487Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:57.153939Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:57.164878Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:57.516743Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:57.527748Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:57.901890Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:57.912805Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:58.272175Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:58.283137Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:58.620050Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:58.630991Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:58.979893Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:58.990757Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:59.344099Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:59.355008Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:59.724867Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:59.736448Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:00.092849Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:00.103777Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:00.449487Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:00.460520Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:00.818045Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:00.828942Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:01.188708Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:01.199620Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:01.545681Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:01.556576Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:01.904329Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:01.915348Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:02.273929Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:02.284965Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Complete 2025-07-08T11:59:02.634816Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:02.645821Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:02.981937Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:02.992849Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:03.334903Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-07-08T11:59:03.334948Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-07-08T11:59:03.334958Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2025-07-08T11:59:03.334987Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-07-08T11:59:03.334996Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 2 "Lock2" owner link 2025-07-08T11:59:03.335010Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-07-08T11:59:03.345789Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-07-08T11:59:03.345953Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:332:2345], cookie=6146309070048563241, name="Lock1") 2025-07-08T11:59:03.345972Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:332:2345], cookie=6146309070048563241) 2025-07-08T11:59:03.346023Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:335:2348], cookie=8412208442542260103, name="Lock2") 2025-07-08T11:59:03.346028Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:335:2348], cookie=8412208442542260103) 2025-07-08T11:59:03.346065Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:338:2351], cookie=15974986010134526836) 2025-07-08T11:59:03.346071Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:338:2351], cookie=15974986010134526836) 2025-07-08T11:59:03.348818Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:59:03.348844Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:59:03.348881Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:59:03.349053Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:59:03.392633Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:59:03.392672Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-07-08T11:59:03.392681Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-07-08T11:59:03.392769Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:378:2381], cookie=10093648043072466804) 2025-07-08T11:59:03.392786Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:378:2381], cookie=10093648043072466804) 2025-07-08T11:59:03.392904Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:385:2387], cookie=15872010606803324697, name="Lock1") 2025-07-08T11:59:03.392914Z node 4 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:385:2387], cookie=15872010606803324697) 2025-07-08T11:59:03.393070Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:388:2390], cookie=16086412771064931103, name="Lock2") 2025-07-08T11:59:03.393077Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:388:2390], cookie=16086412771064931103) 2025-07-08T11:59:03.657658Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:59:03.657691Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:59:03.661617Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:59:03.661671Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:59:03.685392Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:59:03.685546Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:133:2159], cookie=5105918543325232409, session=0, seqNo=0) 2025-07-08T11:59:03.685594Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-07-08T11:59:03.696464Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:133:2159], cookie=5105918543325232409, session=1) 2025-07-08T11:59:03.696549Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:133:2159], cookie=7682853405904456508, session=0, seqNo=0) 2025-07-08T11:59:03.696598Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-07-08T11:59:03.708550Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:133:2159], cookie=7682853405904456508, session=2) 2025-07-08T11:59:03.708652Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:133:2159], cookie=111, session=1, semaphore="Sem1" count=1) 2025-07-08T11:59:03.719647Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:133:2159], cookie=111) 2025-07-08T11:59:03.719790Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:146:2170], cookie=8337447892281737594, name="Sem1", limit=1) 2025-07-08T11:59:03.719825Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-07-08T11:59:03.730788Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:146:2170], cookie=8337447892281737594) 2025-07-08T11:59:03.730897Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:133:2159], cookie=333, session=1, semaphore="Sem1" count=100500) 2025-07-08T11:59:03.741816Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:133:2159], cookie=333) 2025-07-08T11:59:03.741921Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:133:2159], cookie=222, session=1, semaphore="Sem1" count=1) 2025-07-08T11:59:03.741965Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-07-08T11:59:03.742007Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:133:2159], cookie=333, session=2, semaphore="Sem1" count=1) 2025-07-08T11:59:03.753457Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:133:2159], cookie=222) 2025-07-08T11:59:03.753488Z node 5 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:133:2159], cookie=333) 2025-07-08T11:59:03.753620Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:156:2180], cookie=16695075313819534703, name="Sem1") 2025-07-08T11:59:03.753642Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:156:2180], cookie=16695075313819534703) 2025-07-08T11:59:03.753702Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:159:2183], cookie=4152379322617745874, name="Sem1") 2025-07-08T11:59:03.753709Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:159:2183], cookie=4152379322617745874) 2025-07-08T11:59:03.753757Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:162:2186], cookie=15123023192447238963, name="Sem1", force=0) 2025-07-08T11:59:03.765855Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:162:2186], cookie=15123023192447238963) 2025-07-08T11:59:03.766001Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:167:2191], cookie=15196851289408963921, name="Sem1", force=1) 2025-07-08T11:59:03.766027Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 1 "Sem1" 2025-07-08T11:59:03.776976Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:167:2191], cookie=15196851289408963921) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadSomePrograms [GOOD] Test command err: 2025-07-08T11:59:03.275603Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:59:03.278422Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:59:03.278473Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:59:03.279065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:59:03.279126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:59:03.279166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:59:03.279184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:59:03.279196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:59:03.279208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:59:03.279223Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:59:03.279234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:59:03.279245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:59:03.279256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:59:03.279270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:59:03.279290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:59:03.284572Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:59:03.284771Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:59:03.284784Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:59:03.284813Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:03.284863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:59:03.284879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:59:03.284885Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:59:03.284895Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:59:03.284903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:59:03.284910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:59:03.284914Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:59:03.284936Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:03.284943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:59:03.284965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:59:03.284969Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:59:03.284979Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:59:03.284986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:59:03.284993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:59:03.284997Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:59:03.285006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:59:03.285012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:59:03.285016Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:59:03.285040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:59:03.285046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:59:03.285051Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:59:03.285073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:59:03.285080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:59:03.285084Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:59:03.285098Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:59:03.285106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:59:03.285110Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:59:03.285117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:59:03.285124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:59:03.285131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:59:03.285135Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:59:03.285175Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=10; 2025-07-08T11:59:03.285185Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=6; 2025-07-08T11:59:03.285194Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=5; 2025-07-08T11:59:03.285205Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=7; 2025-07-08T11:59:03.285216Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:59:03.285227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:59:03.285234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:59:03.285239Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:59:03.285251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:59:03.285257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;ev ... 
ode 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-07-08T11:59:03.496534Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:03.496538Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;problem=Background activities cannot be started: no index at tablet; 2025-07-08T11:59:03.771571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=75258510344032;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=1751975944274;max=18446744073709551615;plan=0;src=[1:100:2135];cookie=00:0;;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=1;result=not_found; 2025-07-08T11:59:03.771596Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=75258510344032;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=1751975944274;max=18446744073709551615;plan=0;src=[1:100:2135];cookie=00:0;;fline=schema.h:38;event=sync_schema; 2025-07-08T11:59:03.782265Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1751975944274;max=18446744073709551615;plan=0;src=[1:100:2135];cookie=00:0;;this=75258510344032;op_tx=10:TX_KIND_SCHEMA;min=1751975944274;max=18446744073709551615;plan=0;src=[1:100:2135];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1751975944274;max=18446744073709551615;plan=0;src=[1:100:2135];cookie=00:0;;int_this=75258558702592;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2025-07-08T11:59:03.782291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1751975944274;max=18446744073709551615;plan=0;src=[1:100:2135];cookie=00:0;;this=75258510344032;op_tx=10:TX_KIND_SCHEMA;min=1751975944274;max=18446744073709551615;plan=0;src=[1:100:2135];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1751975944274;max=18446744073709551615;plan=0;src=[1:100:2135];cookie=00:0;;int_this=75258558702592;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:100:2135]; 2025-07-08T11:59:03.782300Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1751975944274;max=18446744073709551615;plan=0;src=[1:100:2135];cookie=00:0;;this=75258510344032;op_tx=10:TX_KIND_SCHEMA;min=1751975944274;max=18446744073709551615;plan=0;src=[1:100:2135];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1751975944274;max=18446744073709551615;plan=0;src=[1:100:2135];cookie=00:0;;int_this=75258558702592;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=10; 2025-07-08T11:59:03.782359Z node 1 :TX_COLUMNSHARD DEBUG: TTxNotifyTxCompletion.Execute at tablet 9437184 2025-07-08T11:59:03.782398Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1751975944274 at tablet 9437184, mediator 0 2025-07-08T11:59:03.782406Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] execute at tablet 9437184 2025-07-08T11:59:03.782472Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=1;result=not_found; 2025-07-08T11:59:03.782504Z node 1 :TX_COLUMNSHARD INFO: EnsureTable for pathId: {internal: 9438184000001, ss: 1} ttl settings: { Version: 1 } at tablet 9437184 2025-07-08T11:59:03.783458Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:471;event=OnTieringModified;new_count_tierings=0; 2025-07-08T11:59:03.783487Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:304;method=RegisterTable;path_id=9438184000001; 2025-07-08T11:59:03.783500Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:144;event=RegisterTable;path_id=9438184000001; 2025-07-08T11:59:03.784271Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:463;event=OnTieringModified;path_id=9438184000001; 2025-07-08T11:59:03.784306Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tx_controller.cpp:215;event=finished_tx;tx_id=10; 2025-07-08T11:59:03.805499Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] complete at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=3200;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=3200;columns=5; 2025-07-08T11:59:03.806233Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:210;event=register_operation;operation_id=1;last=1; 2025-07-08T11:59:03.806248Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=write_queue.cpp:14;writing_size=3200;operation_id=f15c62a0-5bf211f0-a7054f17-dd73d36;in_flight=1;size_in_flight=3200; 2025-07-08T11:59:03.808261Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:125:2157];write_id=1;path_id={internal: 9438184000001, ss: 1};fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=3768;count=1;actions=__DEFAULT,;waiting=1;; 2025-07-08T11:59:03.808779Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:105;writing_size=3200;event=data_write_finished;writing_id=f15c62a0-5bf211f0-a7054f17-dd73d36; 2025-07-08T11:59:03.808836Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=60;data_size=20;sum=60;count=1; 2025-07-08T11:59:03.808850Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:65;memory_size=156;data_size=132;sum=156;count=2;size_of_meta=112; 2025-07-08T11:59:03.808862Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:40;memory_size=228;data_size=204;sum=228;count=1;size_of_portion=184; 2025-07-08T11:59:03.809044Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-07-08T11:59:03.809067Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:156;event=add_by_insert_id;id=2;operation_id=1; 2025-07-08T11:59:03.819802Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-07-08T11:59:03.835318Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1751975944280 at tablet 9437184, mediator 0 2025-07-08T11:59:03.835342Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[6] execute at tablet 9437184 2025-07-08T11:59:03.835404Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=100;fline=abstract.h:83;progress_tx_id=100;lock_id=1;broken=0; 2025-07-08T11:59:03.835433Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=100;fline=tx_controller.cpp:215;event=finished_tx;tx_id=100; 2025-07-08T11:59:03.846086Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[6] complete at tablet 9437184 2025-07-08T11:59:03.846126Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:93;progress_tx_id=100;lock_id=1;broken=0; 2025-07-08T11:59:03.846186Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=100;commit_lock_id=1;fline=manager.cpp:177;event=remove_by_insert_id;id=2;operation_id=1; 2025-07-08T11:59:03.846193Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=100;commit_lock_id=1;fline=manager.cpp:180;event=remove_operation;operation_id=1; 2025-07-08T11:59:03.846307Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:234;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-07-08T11:59:03.846316Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:03.846338Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=0; 2025-07-08T11:59:03.848919Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:03.848938Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:03.848961Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:03.848987Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; 2025-07-08T11:59:03.849121Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 100 scanId: 0 version: {1751975944280:100} readable: {1751975944280:max} at tablet 9437184 2025-07-08T11:59:03.860016Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 100 scanId: 0 at tablet 9437184 2025-07-08T11:59:03.860086Z node 1 :TX_COLUMNSHARD_SCAN WARN: tx_id=100;scan_id=0;gen=0;table=;snapshot={1751975944280:100};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:14;event=TTxScan failed;problem=cannot parse program;details=Can't parse SsaProgram: Can't parse TOlapProgram protobuf; >> TExternalTableTestReboots::DropExternalTableWithReboots [GOOD] >> TColumnShardTestReadWrite::WriteOverload-InStore [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sequence_reboots/unittest >> TSequenceReboots::CopyTableWithSequence [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:127:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:132:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:137:2058] recipient: [1:111:2143] 2025-07-08T11:56:56.405931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:56:56.405956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:56:56.405961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:56:56.405965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: using default configuration 2025-07-08T11:56:56.405975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:56:56.405979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:56:56.405986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:56:56.406000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:56:56.406081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:56:56.431249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T11:56:56.431272Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:177:2058] recipient: [1:15:2062] 2025-07-08T11:56:56.441463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:56:56.441543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:56:56.441571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:56:56.445024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:56:56.445214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:56:56.445322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:56:56.445376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:56:56.446355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:56:56.446396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:56:56.446626Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:56:56.446637Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:56:56.446659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:56:56.446666Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:56:56.446672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:56:56.446710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-07-08T11:56:56.448033Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:240:2058] recipient: [1:15:2062] 
2025-07-08T11:56:56.467292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:56:56.467360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:56:56.467412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:56:56.467447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:56:56.467456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:56:56.468180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:56:56.468204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:56:56.468245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:56:56.468261Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:56:56.468266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:56:56.468270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:56:56.468613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:56:56.468623Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:56:56.468627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:56:56.468889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:56:56.468898Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:56:56.468902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:56:56.468908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:56:56.469454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:56:56.469799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:56:56.469832Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:56:56.470003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:56:56.470022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:56:56.470028Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:56:56.470090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:56:56.470096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:56:56.470120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:56:56.470130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:56:56.470495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:56:56.470504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:56:56.470541Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:56:56.470546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:56:56.470606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:56:56.470612Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:56:56.470622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:56:56.470626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:56:56.470630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:56:56.470633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:56:56.470636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:56:56.470641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TO ... 
1003:3, at schemeshard: 72057594046678944, message: Status: SUCCESS Origin: 72075186233409546 TxId: 1003 TxPartId: 3 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 2 Cache: 1 Increment: 1 2025-07-08T11:59:00.182986Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: TCopySequence TProposedCopySequence HandleReply TEvGetSequenceResult shardId# 72075186233409546 status# SUCCESS operationId# 1003:3 at tablet 72057594046678944 2025-07-08T11:59:00.182993Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: TCopySequence TProposedCopySequence ProgressState sending TEvRestoreSequence to tablet 72075186233409546 operationId# 1003:3 at tablet 72057594046678944 2025-07-08T11:59:00.182998Z node 191 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-07-08T11:59:00.183002Z node 191 :FLAT_TX_SCHEMESHARD TRACE: Ack tablet strongly msg opId: 1003:3 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:6 2025-07-08T11:59:00.183331Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:3, at schemeshard: 72057594046678944 2025-07-08T11:59:00.183338Z node 191 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-07-08T11:59:00.183343Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1003:3 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:10 msg type: 276299787 2025-07-08T11:59:00.183399Z node 191 :SEQUENCESHARD TRACE: [sequenceshard 72075186233409546] TTxRestoreSequence.Execute PathId# [OwnerId: 72057594046678944, LocalPathId: 10] Record# PathId { OwnerId: 72057594046678944 LocalId: 10 } TxId: 1003 TxPartId: 3 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 2 Cache: 1 Increment: 1 2025-07-08T11:59:00.183423Z node 191 :SEQUENCESHARD NOTICE: [sequenceshard 72075186233409546] TTxRestoreSequence.Execute SUCCESS PathId# [OwnerId: 72057594046678944, LocalPathId: 10] Record# PathId { OwnerId: 72057594046678944 LocalId: 10 } TxId: 1003 TxPartId: 3 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 2 Cache: 1 Increment: 1 2025-07-08T11:59:00.204641Z node 191 :SEQUENCESHARD TRACE: [sequenceshard 72075186233409546] TTxRestoreSequence.Complete 2025-07-08T11:59:00.204734Z node 191 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 276299788, Sender [191:360:2341], Recipient [191:125:2151]: NKikimrTxSequenceShard.TEvRestoreSequenceResult Status: SUCCESS Origin: 72075186233409546 TxId: 1003 TxPartId: 3 2025-07-08T11:59:00.204744Z node 191 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSequenceShard::TEvSequenceShard::TEvRestoreSequenceResult 2025-07-08T11:59:00.204751Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvRestoreSequenceResult, at schemeshard: 72057594046678944, message: Status: SUCCESS Origin: 72075186233409546 TxId: 1003 TxPartId: 3 2025-07-08T11:59:00.204782Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:3, at schemeshard: 72057594046678944, message: Status: SUCCESS Origin: 72075186233409546 TxId: 1003 TxPartId: 3 2025-07-08T11:59:00.204793Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: TCopySequence TProposedCopySequence HandleReply TEvRestoreSequenceResult shardId# 72075186233409546 status# SUCCESS operationId# 1003:3 at tablet 72057594046678944 2025-07-08T11:59:00.204837Z node 191 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:3 140 -> 240 2025-07-08T11:59:00.204864Z node 191 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at 
tablet# 72057594046678944 2025-07-08T11:59:00.204874Z node 191 :FLAT_TX_SCHEMESHARD TRACE: Ack tablet strongly msg opId: 1003:3 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:10 2025-07-08T11:59:00.205569Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:3, at schemeshard: 72057594046678944 2025-07-08T11:59:00.205585Z node 191 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-07-08T11:59:00.205592Z node 191 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1003:3 2025-07-08T11:59:00.205652Z node 191 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [191:125:2151], Recipient [191:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-07-08T11:59:00.205658Z node 191 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-07-08T11:59:00.205668Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:3, at schemeshard: 72057594046678944 2025-07-08T11:59:00.205677Z node 191 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:3 ProgressState 2025-07-08T11:59:00.205690Z node 191 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-07-08T11:59:00.205697Z node 191 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:3 progress is 4/4 2025-07-08T11:59:00.205702Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2025-07-08T11:59:00.205707Z node 191 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:3 progress is 4/4 2025-07-08T11:59:00.205711Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2025-07-08T11:59:00.205717Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 4/4, is published: true 2025-07-08T11:59:00.205733Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [191:473:2425] message: TxId: 1003 2025-07-08T11:59:00.205740Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 4/4 2025-07-08T11:59:00.205748Z node 191 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2025-07-08T11:59:00.205755Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2025-07-08T11:59:00.205794Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5 2025-07-08T11:59:00.205799Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-07-08T11:59:00.205805Z node 191 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2025-07-08T11:59:00.205808Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2025-07-08T11:59:00.205814Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 3 2025-07-08T11:59:00.205818Z node 191 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2025-07-08T11:59:00.205822Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2025-07-08T11:59:00.205832Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 9] was 3 2025-07-08T11:59:00.205836Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-07-08T11:59:00.205841Z node 191 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:3 2025-07-08T11:59:00.205845Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:3 2025-07-08T11:59:00.205852Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 2 2025-07-08T11:59:00.205856Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-07-08T11:59:00.206268Z node 191 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-07-08T11:59:00.206287Z node 191 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [191:473:2425] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 1003 at schemeshard: 72057594046678944 2025-07-08T11:59:00.206327Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-07-08T11:59:00.206333Z node 191 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [191:673:2596] 2025-07-08T11:59:00.206371Z node 191 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [191:675:2598], Recipient [191:125:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-07-08T11:59:00.206376Z node 191 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-07-08T11:59:00.206380Z node 191 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 2025-07-08T11:59:00.206463Z node 191 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [191:775:2695], Recipient [191:125:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/copy/myseq" Options { ShowPrivateTable: true } 2025-07-08T11:59:00.206472Z node 191 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-07-08T11:59:00.206482Z node 191 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/copy/myseq" Options { ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-07-08T11:59:00.206534Z node 191 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/copy/myseq" took 45us result status StatusSuccess 2025-07-08T11:59:00.206617Z node 191 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/copy/myseq" PathDescription { Self { Name: "myseq" PathId: 10 SchemeshardId: 72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 9 PathsLimit: 10000 ShardsInside: 5 
ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SequenceDescription { Name: "myseq" PathId { OwnerId: 72057594046678944 LocalId: 10 } Version: 1 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 10 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:00.207086Z node 191 :SEQUENCESHARD TRACE: [sequenceshard 72075186233409546] TTxAllocateSequence.Execute PathId# [OwnerId: 72057594046678944, LocalPathId: 10] Cache# 1 2025-07-08T11:59:00.207107Z node 191 :SEQUENCESHARD TRACE: [sequenceshard 72075186233409546] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 72057594046678944, LocalPathId: 10] AllocationStart# 2 AllocationCount# 1 AllocationIncrement# 1 2025-07-08T11:59:00.217959Z node 191 :SEQUENCESHARD TRACE: [sequenceshard 72075186233409546] TTxAllocateSequence.Complete |64.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |64.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move >> TxUsage::Sinks_Oltp_WriteToTopic_4 [GOOD] >> TColumnShardTestReadWrite::WriteReadZSTD >> TxUsage::WriteToTopic_Demo_12 [GOOD] |64.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |64.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 >> TxUsage::WriteToTopic_Demo_13 >> TxUsage::Sinks_Oltp_WriteToTopic_5 >> TExternalTableTestReboots::CreateDroppedExternalTableAndDropWithReboots [GOOD] >> TExternalTableTestReboots::CreateDroppedExternalTableWithReboots [GOOD] >> Normalizers::CleanEmptyPortionsNormalizer [GOOD] >> TColumnShardTestReadWrite::ReadWithProgramLike |64.7%| [TA] {RESULT} $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TColumnShardTestReadWrite::ReadWithProgram ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::DropExternalTableWithReboots [GOOD] >> TExternalTableTestReboots::SimpleDropExternalTableWithReboots2 [GOOD] >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:127:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:132:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:137:2058] recipient: [1:111:2143] 2025-07-08T11:58:51.559475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:58:51.559495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:51.559500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:58:51.559504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:58:51.559509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:58:51.559513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:58:51.559521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:51.559537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:58:51.559606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:58:51.570404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T11:58:51.570427Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:177:2058] recipient: [1:15:2062] 2025-07-08T11:58:51.573334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:58:51.573372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:58:51.573395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:58:51.574972Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:58:51.575120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:58:51.575218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:51.575259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:58:51.575706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:51.575738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:58:51.575945Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:51.575955Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:51.575979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:58:51.575988Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:51.575993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:58:51.576023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-07-08T11:58:51.577043Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T11:58:51.594858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:58:51.594912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:51.594972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:58:51.595012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:58:51.595021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:51.595599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:51.595623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:58:51.595661Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:51.595669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:58:51.595673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:58:51.595679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:58:51.596073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:51.596085Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:58:51.596089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:58:51.596432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:51.596442Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:51.596447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:51.596453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:58:51.597011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:58:51.597348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:58:51.597382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:58:51.597540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:51.597560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:58:51.597566Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:51.597634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:58:51.597642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:51.597665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
2025-07-08T11:58:51.597676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:58:51.598009Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:51.598016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:51.598044Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:51.598047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:58:51.598094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:51.598100Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:58:51.598110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:51.598114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:51.598118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:51.598121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:51.598125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:58:51.598130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TO ... ontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1005 at step: 5000007 2025-07-08T11:59:04.509768Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:04.509785Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1005 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 214748366958 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:04.509792Z node 50 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalTable TPropose opId# 1005:0 HandleReply TEvOperationPlan: step# 5000007 2025-07-08T11:59:04.509820Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-07-08T11:59:04.509830Z node 50 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1005:0 128 -> 240 2025-07-08T11:59:04.509850Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T11:59:04.509858Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-07-08T11:59:04.509863Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T11:59:04.510013Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-07-08T11:59:04.510257Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 1005 FAKE_COORDINATOR: Erasing txId 1005 2025-07-08T11:59:04.510508Z node 50 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:04.510514Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:04.510534Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-07-08T11:59:04.510546Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T11:59:04.510563Z node 50 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:04.510568Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [50:205:2207], at schemeshard: 72057594046678944, txId: 1005, path id: 1 2025-07-08T11:59:04.510572Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [50:205:2207], at schemeshard: 72057594046678944, txId: 1005, path id: 5 2025-07-08T11:59:04.510575Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [50:205:2207], at schemeshard: 72057594046678944, txId: 1005, path id: 3 2025-07-08T11:59:04.510598Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2025-07-08T11:59:04.510604Z node 50 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1005:0 ProgressState 2025-07-08T11:59:04.510614Z node 50 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2025-07-08T11:59:04.510617Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-07-08T11:59:04.510622Z node 50 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2025-07-08T11:59:04.510624Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-07-08T11:59:04.510628Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: false 2025-07-08T11:59:04.510633Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-07-08T11:59:04.510637Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2025-07-08T11:59:04.510640Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2025-07-08T11:59:04.510649Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-07-08T11:59:04.510652Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-07-08T11:59:04.510656Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1005, publications: 3, subscribers: 0 2025-07-08T11:59:04.510660Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2025-07-08T11:59:04.510663Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-07-08T11:59:04.510666Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication 
details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2025-07-08T11:59:04.510726Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-07-08T11:59:04.510734Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-07-08T11:59:04.510738Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1005 2025-07-08T11:59:04.510742Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-07-08T11:59:04.510745Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-07-08T11:59:04.510797Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T11:59:04.510804Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-07-08T11:59:04.510811Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-07-08T11:59:04.510852Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 1005 2025-07-08T11:59:04.510860Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 1005 2025-07-08T11:59:04.510863Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1005 2025-07-08T11:59:04.510867Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2025-07-08T11:59:04.510870Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T11:59:04.510956Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1005 2025-07-08T11:59:04.510964Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1005 2025-07-08T11:59:04.510967Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1005 2025-07-08T11:59:04.510971Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-07-08T11:59:04.510974Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason 
remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T11:59:04.510982Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1005, subscribers: 0 2025-07-08T11:59:04.511576Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-07-08T11:59:04.511619Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-07-08T11:59:04.511643Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-07-08T11:59:04.511833Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 TestModificationResult got TxId: 1005, wait until txId: 1005 TestWaitNotification wait txId: 1005 2025-07-08T11:59:04.511883Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2025-07-08T11:59:04.511889Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2025-07-08T11:59:04.511942Z node 50 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2025-07-08T11:59:04.511956Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2025-07-08T11:59:04.511961Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [50:449:2440] TestWaitNotification: OK eventTxId 1005 2025-07-08T11:59:04.512025Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:59:04.512044Z node 50 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 27us result status StatusPathDoesNotExist 2025-07-08T11:59:04.512070Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> BasicUsage::ReadWithRestarts ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteOverload-InStore [GOOD] Test command err: 2025-07-08T11:59:00.212611Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:59:00.215196Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:59:00.215243Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:59:00.215773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:59:00.215816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:59:00.215841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:59:00.215856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:59:00.215868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:59:00.215881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:59:00.215893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:59:00.215905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:59:00.215916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:59:00.215927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:59:00.215939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:59:00.215951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:59:00.220466Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:59:00.220649Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:59:00.220665Z node 1 
:TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:59:00.220691Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:00.220727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:59:00.220740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:59:00.220744Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:59:00.220750Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:59:00.220757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:59:00.220762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:59:00.220765Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:59:00.220780Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:00.220786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:59:00.220791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:59:00.220793Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:59:00.220800Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:59:00.220804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:59:00.220809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:59:00.220812Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:59:00.220817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:59:00.220823Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:59:00.220825Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:59:00.220840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:59:00.220845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:59:00.220847Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:59:00.220862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:59:00.220867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:59:00.220869Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:59:00.220877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:59:00.220882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:59:00.220885Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:59:00.220890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:59:00.220895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:59:00.220899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:59:00.220901Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:59:00.220929Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=7; 2025-07-08T11:59:00.220939Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=7; 2025-07-08T11:59:00.220962Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=2; 
2025-07-08T11:59:00.220974Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=7; 2025-07-08T11:59:00.220982Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:59:00.220992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:59:00.220997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:59:00.221001Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:59:00.221010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:59:00.221014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;eve ... HARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=21;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=1692;count=35; 2025-07-08T11:59:04.279096Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=21;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:65;memory_size=190;data_size=180;sum=3420;count=36;size_of_meta=112; 2025-07-08T11:59:04.279107Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=21;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:40;memory_size=262;data_size=252;sum=4716;count=18;size_of_portion=184; 2025-07-08T11:59:04.279323Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 1 2025-07-08T11:59:04.279346Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=21;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:156;event=add_by_insert_id;id=19;operation_id=18; 2025-07-08T11:59:04.289957Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 1 2025-07-08T11:59:04.291963Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:105;writing_size=6330728;event=data_write_finished;writing_id=f0f994d6-5bf211f0-9b26f127-d11a6e42; 
2025-07-08T11:59:04.292022Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=22;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=1786;count=37; 2025-07-08T11:59:04.292039Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=22;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:65;memory_size=190;data_size=180;sum=3610;count=38;size_of_meta=112; 2025-07-08T11:59:04.292050Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=22;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:40;memory_size=262;data_size=252;sum=4978;count=19;size_of_portion=184; 2025-07-08T11:59:04.292256Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 1 2025-07-08T11:59:04.292277Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=22;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:156;event=add_by_insert_id;id=20;operation_id=19; 2025-07-08T11:59:04.302948Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 1 2025-07-08T11:59:04.304837Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:105;writing_size=6330728;event=data_write_finished;writing_id=f11300ec-5bf211f0-a797bf4d-f73e2459; 2025-07-08T11:59:04.304912Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=23;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=1880;count=39; 2025-07-08T11:59:04.304931Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=23;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:65;memory_size=190;data_size=180;sum=3800;count=40;size_of_meta=112; 2025-07-08T11:59:04.304942Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=23;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:40;memory_size=262;data_size=252;sum=5240;count=20;size_of_portion=184; 
2025-07-08T11:59:04.305191Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 1 2025-07-08T11:59:04.305218Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=23;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:156;event=add_by_insert_id;id=21;operation_id=20; 2025-07-08T11:59:04.316727Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 1 2025-07-08T11:59:04.318252Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:105;writing_size=6330728;event=data_write_finished;writing_id=f126deaa-5bf211f0-a328253a-177009bb; 2025-07-08T11:59:04.318304Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=24;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=1974;count=41; 2025-07-08T11:59:04.318320Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=24;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:65;memory_size=190;data_size=180;sum=3990;count=42;size_of_meta=112; 2025-07-08T11:59:04.318330Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=24;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:40;memory_size=262;data_size=252;sum=5502;count=21;size_of_portion=184; 2025-07-08T11:59:04.318536Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 1 2025-07-08T11:59:04.318558Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=24;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:156;event=add_by_insert_id;id=22;operation_id=21; 2025-07-08T11:59:04.329193Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 1 2025-07-08T11:59:04.331995Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:210;event=register_operation;operation_id=22;last=22; 2025-07-08T11:59:04.332018Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=write_queue.cpp:14;writing_size=6330728;operation_id=f1ac9c02-5bf211f0-a4161b67-bf97e757;in_flight=1;size_in_flight=6330728; 2025-07-08T11:59:04.501465Z node 1 
:TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:125:2157];write_id=22;path_id={internal: 9438184000001, ss: 1};fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=8246112;count=1;actions=__DEFAULT,;waiting=1;; 2025-07-08T11:59:04.525567Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:105;writing_size=6330728;event=data_write_finished;writing_id=f1ac9c02-5bf211f0-a4161b67-bf97e757; 2025-07-08T11:59:04.525629Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=25;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=2068;count=43; 2025-07-08T11:59:04.525646Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=25;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:65;memory_size=190;data_size=180;sum=4180;count=44;size_of_meta=112; 2025-07-08T11:59:04.525657Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=25;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:40;memory_size=262;data_size=252;sum=5764;count=22;size_of_portion=184; 2025-07-08T11:59:04.525885Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 1 2025-07-08T11:59:04.525911Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=25;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:156;event=add_by_insert_id;id=23;operation_id=22; 2025-07-08T11:59:04.536899Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::CreateDroppedExternalTableAndDropWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] Leader for TabletID 
72057594046678944 is [1:124:2150] sender: [1:127:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:132:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:137:2058] recipient: [1:111:2143] 2025-07-08T11:58:51.905943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:58:51.905970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:51.905976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:58:51.905983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:58:51.905989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:58:51.905994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:58:51.906004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:51.906024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:58:51.906113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:58:51.919085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T11:58:51.919104Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:177:2058] recipient: [1:15:2062] 2025-07-08T11:58:51.922643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:58:51.922705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:58:51.922736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:58:51.924771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:58:51.924963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:58:51.925073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:51.925125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:58:51.925699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:51.925742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:58:51.925997Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:51.926007Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:51.926034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Execute 2025-07-08T11:58:51.926043Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:51.926057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:58:51.926099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-07-08T11:58:51.927465Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T11:58:51.947405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:58:51.947469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:51.947526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:58:51.947576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:58:51.947587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:51.949364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:51.949396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:58:51.949437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:51.949446Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:58:51.949452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:58:51.949457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:58:51.949974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:51.949988Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:58:51.949994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:58:51.950460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-07-08T11:58:51.950472Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:51.950479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:51.950487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:58:51.951131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:58:51.951617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:58:51.951660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:58:51.951856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:51.951883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:58:51.951891Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:51.951983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:58:51.951992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:51.952020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:58:51.952032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:58:51.952658Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:51.952667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:51.952707Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:51.952713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:58:51.952777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:51.952785Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
[72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:58:51.952797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:51.952802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:51.952807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:51.952810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:51.952814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:58:51.952819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TO ... ontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1006 at step: 5000007 2025-07-08T11:59:05.102505Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:05.102525Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1006 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 214748366958 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:05.102533Z node 50 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalTable TPropose opId# 1006:0 HandleReply TEvOperationPlan: step# 5000007 2025-07-08T11:59:05.102565Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-07-08T11:59:05.102576Z node 50 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1006:0 128 -> 240 2025-07-08T11:59:05.102597Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T11:59:05.102603Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-07-08T11:59:05.102607Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T11:59:05.102712Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2025-07-08T11:59:05.102951Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 FAKE_COORDINATOR: Erasing txId 1006 2025-07-08T11:59:05.103211Z node 50 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:05.103217Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:05.103238Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-07-08T11:59:05.103252Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T11:59:05.103267Z node 50 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:05.103271Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [50:205:2207], at 
schemeshard: 72057594046678944, txId: 1006, path id: 1 2025-07-08T11:59:05.103275Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [50:205:2207], at schemeshard: 72057594046678944, txId: 1006, path id: 5 2025-07-08T11:59:05.103279Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [50:205:2207], at schemeshard: 72057594046678944, txId: 1006, path id: 3 2025-07-08T11:59:05.103316Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72057594046678944 2025-07-08T11:59:05.103322Z node 50 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1006:0 ProgressState 2025-07-08T11:59:05.103334Z node 50 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1006:0 progress is 1/1 2025-07-08T11:59:05.103338Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2025-07-08T11:59:05.103343Z node 50 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1006:0 progress is 1/1 2025-07-08T11:59:05.103346Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2025-07-08T11:59:05.103349Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 1/1, is published: false 2025-07-08T11:59:05.103354Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2025-07-08T11:59:05.103358Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1006:0 2025-07-08T11:59:05.103362Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1006:0 2025-07-08T11:59:05.103374Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-07-08T11:59:05.103378Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-07-08T11:59:05.103382Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1006, publications: 3, subscribers: 0 2025-07-08T11:59:05.103385Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2025-07-08T11:59:05.103388Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-07-08T11:59:05.103391Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2025-07-08T11:59:05.103450Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2025-07-08T11:59:05.103460Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2025-07-08T11:59:05.103464Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1006 2025-07-08T11:59:05.103468Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-07-08T11:59:05.103471Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for 
pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-07-08T11:59:05.103533Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T11:59:05.103538Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-07-08T11:59:05.103546Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-07-08T11:59:05.103597Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 1006 2025-07-08T11:59:05.103605Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 1006 2025-07-08T11:59:05.103608Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1006 2025-07-08T11:59:05.103611Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2025-07-08T11:59:05.103615Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T11:59:05.103711Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1006 2025-07-08T11:59:05.103718Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1006 2025-07-08T11:59:05.103720Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1006 2025-07-08T11:59:05.103724Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-07-08T11:59:05.103726Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T11:59:05.103732Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1006, subscribers: 0 2025-07-08T11:59:05.104108Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2025-07-08T11:59:05.104141Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-07-08T11:59:05.104212Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2025-07-08T11:59:05.104256Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 TestModificationResult got TxId: 1006, wait until txId: 1006 TestWaitNotification wait txId: 1006 2025-07-08T11:59:05.104289Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 1006: send EvNotifyTxCompletion 2025-07-08T11:59:05.104293Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1006 2025-07-08T11:59:05.104331Z node 50 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1006, at schemeshard: 72057594046678944 2025-07-08T11:59:05.104341Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: got EvNotifyTxCompletionResult 2025-07-08T11:59:05.104343Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: satisfy waiter [50:444:2435] TestWaitNotification: OK eventTxId 1006 2025-07-08T11:59:05.104387Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:59:05.104402Z node 50 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 23us result status StatusPathDoesNotExist 2025-07-08T11:59:05.104427Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> test.py::test[aggregate-group_compact_sorted_distinct_complex--Results] [GOOD] >> test.py::test[aggregate-group_compact_sorted_with_diff_order--Results] >> TColumnShardTestReadWrite::ReadWithProgramLike [GOOD] >> TColumnShardTestReadWrite::ReadWithProgram [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::CleanEmptyPortionsNormalizer [GOOD] Test command err: 2025-07-08T11:58:56.795691Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:124:2156];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:58:56.798344Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:124:2156];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:58:56.798389Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:58:56.798891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=EmptyPortionsCleaner; 2025-07-08T11:58:56.798922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-07-08T11:58:56.798952Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:56.798965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:56.798978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:56.798990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:56.799001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:58:56.799012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:56.799035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:56.799046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:58:56.799057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:56.799070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:58:56.799083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:58:56.804205Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:58:56.804299Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=EmptyPortionsCleaner; 2025-07-08T11:58:56.804311Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-07-08T11:58:56.804369Z node 1 :TX_COLUMNSHARD CRIT: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_empty.cpp:323;tasks_for_remove=0;distribution=; 2025-07-08T11:58:56.804414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=EmptyPortionsCleaner;id=NO_VALUE_OPTIONAL; 2025-07-08T11:58:56.804424Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-07-08T11:58:56.804429Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-07-08T11:58:56.804440Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:56.804448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:58:56.804454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:58:56.804456Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-07-08T11:58:56.804462Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:58:56.804467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:58:56.804471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:58:56.804474Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-07-08T11:58:56.804486Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:56.804490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:58:56.804495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:58:56.804497Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-07-08T11:58:56.804504Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:58:56.804508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:58:56.804513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:58:56.804515Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:58:56.804521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:58:56.804528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:58:56.804530Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:58:56.804534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:58:56.804539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:58:56.804542Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:58:56.804553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:58:56.804558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:58:56.804560Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:58:56.804567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:58:56.804572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:58:56.804574Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:58:56.804579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:58:56.804583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:58:56.804585Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:58:56.804590Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:58:56.804596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:58:56.804599Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:58:56.804607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process= ... 184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-07-08T11:59:05.574503Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:541:2542];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:207;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-07-08T11:59:05.574837Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:541:2542];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:88;event=TEvTaskProcessedResult; 2025-07-08T11:59:05.574845Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:541:2542];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=merge.cpp:75;event=DoApply;interval_idx=0; 2025-07-08T11:59:05.574852Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:541:2542];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2; 2025-07-08T11:59:05.574867Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:541:2542];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=20048;merger=0;interval_id=2; 2025-07-08T11:59:05.574878Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:541:2542];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-07-08T11:59:05.574889Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:541:2542];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:05.574893Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:541:2542];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=1;count=20048;finished=1; 2025-07-08T11:59:05.574899Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:541:2542];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:207;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-07-08T11:59:05.574947Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:541:2542];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T11:59:05.574973Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:541:2542];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:1;records_count:20048;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:05.574979Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:541:2542];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-07-08T11:59:05.574991Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:541:2542];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:238;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;);columns=3;rows=20048; 2025-07-08T11:59:05.575003Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:541:2542];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:258;stage=data_format;batch_size=2405760;num_rows=20048;batch_columns=key1,key2,field; 2025-07-08T11:59:05.575039Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:541:2542];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:536:2538];bytes=2405760;rows=20048;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2025-07-08T11:59:05.575052Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:541:2542];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:278;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:05.575063Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:541:2542];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:05.575070Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:541:2542];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:05.575564Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:541:2542];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 
2025-07-08T11:59:05.575585Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:541:2542];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:05.575593Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:541:2542];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:05.575600Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:541:2542] finished for tablet 9437184 2025-07-08T11:59:05.575676Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:541:2542];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:425;event=scan_finish;compute_actor_id=[1:536:2538];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.057}],"full":{"a":1751975945517722,"name":"_full_task","f":1751975945517722,"d_finished":0,"c":0,"l":1751975945575611,"d":57889},"events":[{"name":"bootstrap","f":1751975945517776,"d_finished":505,"c":1,"l":1751975945518281,"d":505},{"a":1751975945575558,"name":"ack","f":1751975945574941,"d_finished":132,"c":1,"l":1751975945575073,"d":185},{"a":1751975945575554,"name":"processing","f":1751975945518634,"d_finished":40707,"c":16,"l":1751975945575074,"d":40764},{"name":"ProduceResults","f":1751975945518075,"d_finished":492,"c":19,"l":1751975945575596,"d":492},{"a":1751975945575597,"name":"Finish","f":1751975945575597,"d_finished":0,"c":0,"l":1751975945575611,"d":14},{"name":"task_result","f":1751975945518638,"d_finished":40526,"c":15,"l":1751975945574908,"d":40526}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:05.575692Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:541:2542];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:536:2538];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-07-08T11:59:05.575725Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:541:2542];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:379;event=scan_finished;compute_actor_id=[1:536:2538];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.057}],"full":{"a":1751975945517722,"name":"_full_task","f":1751975945517722,"d_finished":0,"c":0,"l":1751975945575698,"d":57976},"events":[{"name":"bootstrap","f":1751975945517776,"d_finished":505,"c":1,"l":1751975945518281,"d":505},{"a":1751975945575558,"name":"ack","f":1751975945574941,"d_finished":132,"c":1,"l":1751975945575073,"d":272},{"a":1751975945575554,"name":"processing","f":1751975945518634,"d_finished":40707,"c":16,"l":1751975945575074,"d":40851},{"name":"ProduceResults","f":1751975945518075,"d_finished":492,"c":19,"l":1751975945575596,"d":492},{"a":1751975945575597,"name":"Finish","f":1751975945575597,"d_finished":0,"c":0,"l":1751975945575698,"d":101},{"name":"task_result","f":1751975945518638,"d_finished":40526,"c":15,"l":1751975945574908,"d":40526}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:05.575741Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:541:2542];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-07-08T11:59:05.517544Z;index_granules=0;index_portions=2;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=2776560;inserted_portions_bytes=0;committed_portions_bytes=2488696;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5265256;selected_rows=0; 2025-07-08T11:59:05.575746Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:541:2542];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:192;event=scan_aborted;reason=unexpected on destructor; 2025-07-08T11:59:05.575791Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:541:2542];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::SimpleDropExternalTableWithReboots2 [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] Leader for TabletID 72057594046678944 is 
[1:124:2150] sender: [1:127:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:132:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:137:2058] recipient: [1:111:2143] 2025-07-08T11:58:52.655246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:58:52.655264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:52.655267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:58:52.655271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:58:52.655274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:58:52.655277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:58:52.655283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:52.655301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:58:52.655370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:58:52.668327Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T11:58:52.668349Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:177:2058] recipient: [1:15:2062] 2025-07-08T11:58:52.671670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:58:52.671708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:58:52.671735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:58:52.673495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:58:52.673655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:58:52.673759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:52.673795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:58:52.674762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:52.674797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:58:52.675010Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:52.675019Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:52.675040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 
2025-07-08T11:58:52.675049Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:52.675055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:58:52.675084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-07-08T11:58:52.676327Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T11:58:52.695884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:58:52.695953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:52.696010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:58:52.696059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:58:52.696069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:52.697265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:52.697290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:58:52.697333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:52.697342Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:58:52.697347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:58:52.697353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:58:52.697767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:52.697777Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:58:52.697782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:58:52.698052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:52.698061Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:52.698065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:52.698071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:58:52.698596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:58:52.698946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:58:52.698991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:58:52.699169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:52.699194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:58:52.699200Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:52.699281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:58:52.699287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:52.699312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:58:52.699324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:58:52.699695Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:52.699701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:52.699736Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:52.699741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:58:52.699842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:52.699849Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 
ProgressState 2025-07-08T11:58:52.699860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:52.699864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:52.699868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:52.699871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:52.699875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:58:52.699879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TO ... ontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1004 at step: 5000005 2025-07-08T11:59:05.605969Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:05.605985Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1004 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 214748366958 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:05.605992Z node 50 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalTable TPropose opId# 1004:0 HandleReply TEvOperationPlan: step# 5000005 2025-07-08T11:59:05.606035Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-07-08T11:59:05.606049Z node 50 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 128 -> 240 2025-07-08T11:59:05.606068Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T11:59:05.606075Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-07-08T11:59:05.606080Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T11:59:05.606218Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-07-08T11:59:05.606606Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 FAKE_COORDINATOR: Erasing txId 1004 2025-07-08T11:59:05.607026Z node 50 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:05.607034Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:05.607060Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-07-08T11:59:05.607074Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T11:59:05.607093Z node 50 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:05.607097Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [50:205:2207], at schemeshard: 72057594046678944, txId: 
1004, path id: 1 2025-07-08T11:59:05.607101Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [50:205:2207], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2025-07-08T11:59:05.607105Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [50:205:2207], at schemeshard: 72057594046678944, txId: 1004, path id: 3 2025-07-08T11:59:05.607167Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-07-08T11:59:05.607174Z node 50 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2025-07-08T11:59:05.607197Z node 50 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2025-07-08T11:59:05.607201Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-07-08T11:59:05.607205Z node 50 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2025-07-08T11:59:05.607208Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-07-08T11:59:05.607212Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2025-07-08T11:59:05.607216Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-07-08T11:59:05.607220Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2025-07-08T11:59:05.607223Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2025-07-08T11:59:05.607234Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-07-08T11:59:05.607238Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-07-08T11:59:05.607242Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 3, subscribers: 0 2025-07-08T11:59:05.607246Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-07-08T11:59:05.607249Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-07-08T11:59:05.607252Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-07-08T11:59:05.607309Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T11:59:05.607318Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T11:59:05.607322Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1004 2025-07-08T11:59:05.607326Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-07-08T11:59:05.607330Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, 
LocalPathId: 4] was 1 2025-07-08T11:59:05.607398Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T11:59:05.607407Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-07-08T11:59:05.607414Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-07-08T11:59:05.607467Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T11:59:05.607474Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T11:59:05.607478Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2025-07-08T11:59:05.607482Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-07-08T11:59:05.607485Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T11:59:05.607763Z node 50 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T11:59:05.607788Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T11:59:05.607792Z node 50 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2025-07-08T11:59:05.607797Z node 50 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-07-08T11:59:05.607800Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T11:59:05.607812Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2025-07-08T11:59:05.608387Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-07-08T11:59:05.608433Z node 50 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-07-08T11:59:05.608456Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-07-08T11:59:05.608669Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2025-07-08T11:59:05.608729Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send 
EvNotifyTxCompletion 2025-07-08T11:59:05.608735Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-07-08T11:59:05.608787Z node 50 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-07-08T11:59:05.608800Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-07-08T11:59:05.608804Z node 50 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [50:387:2378] TestWaitNotification: OK eventTxId 1004 2025-07-08T11:59:05.608919Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:59:05.608944Z node 50 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 31us result status StatusPathDoesNotExist 2025-07-08T11:59:05.609003Z node 50 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::CreateDroppedExternalTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:127:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:132:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:137:2058] recipient: [1:111:2143] 2025-07-08T11:58:52.456622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:58:52.456644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: 
Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:52.456649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:58:52.456655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:58:52.456661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:58:52.456665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:58:52.456675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:52.456693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:58:52.456767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:58:52.467880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T11:58:52.467900Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:177:2058] recipient: [1:15:2062] 2025-07-08T11:58:52.470957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:58:52.471000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:58:52.471026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:58:52.472456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:58:52.472612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:58:52.472716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:52.472750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:58:52.473240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:52.473266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:58:52.473454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:52.473462Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:52.473485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:58:52.473492Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:52.473496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:58:52.473525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: 
[1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-07-08T11:58:52.474543Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T11:58:52.487942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:58:52.487988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:52.488021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:58:52.488053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:58:52.488059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:52.488473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:52.488488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:58:52.488509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:52.488517Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:58:52.488521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:58:52.488525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:58:52.488821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:52.488830Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:58:52.488834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:58:52.490343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:52.490355Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:52.490359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:52.490365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:58:52.490855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 
18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:58:52.491201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:58:52.491222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:58:52.491341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:52.491355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:58:52.491359Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:52.491414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:58:52.491419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:52.491434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:58:52.491441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:58:52.491786Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:52.491794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:52.491815Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:52.491819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:58:52.491879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:52.491885Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:58:52.491895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:52.491899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:52.491903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:52.491906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:52.491910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:58:52.491914Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TO ... 7-08T11:59:05.260433Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2025-07-08T11:59:05.260435Z node 49 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1006 2025-07-08T11:59:05.260438Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-07-08T11:59:05.260441Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-07-08T11:59:05.260448Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 0/1, is published: true 2025-07-08T11:59:05.260735Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1006:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1006 msg type: 269090816 2025-07-08T11:59:05.260753Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1006, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1006 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1006 at step: 5000007 2025-07-08T11:59:05.260931Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:05.260963Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1006 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 210453399660 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:05.260973Z node 49 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalTable TPropose opId# 1006:0 HandleReply TEvOperationPlan: step# 5000007 2025-07-08T11:59:05.261008Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-07-08T11:59:05.261022Z node 49 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1006:0 128 -> 240 2025-07-08T11:59:05.261043Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T11:59:05.261050Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-07-08T11:59:05.261055Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T11:59:05.261193Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2025-07-08T11:59:05.261209Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 FAKE_COORDINATOR: Erasing txId 1006 2025-07-08T11:59:05.261809Z node 49 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:05.261824Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: 
[OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:05.261857Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-07-08T11:59:05.261878Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T11:59:05.261900Z node 49 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:05.261905Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [49:206:2208], at schemeshard: 72057594046678944, txId: 1006, path id: 1 2025-07-08T11:59:05.261910Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [49:206:2208], at schemeshard: 72057594046678944, txId: 1006, path id: 5 2025-07-08T11:59:05.261913Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [49:206:2208], at schemeshard: 72057594046678944, txId: 1006, path id: 3 2025-07-08T11:59:05.261973Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72057594046678944 2025-07-08T11:59:05.262011Z node 49 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1006:0 ProgressState 2025-07-08T11:59:05.262028Z node 49 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1006:0 progress is 1/1 2025-07-08T11:59:05.262032Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2025-07-08T11:59:05.262037Z node 49 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1006:0 progress is 1/1 2025-07-08T11:59:05.262040Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2025-07-08T11:59:05.262044Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 1/1, is published: false 2025-07-08T11:59:05.262050Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2025-07-08T11:59:05.262055Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1006:0 2025-07-08T11:59:05.262059Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1006:0 2025-07-08T11:59:05.262079Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-07-08T11:59:05.262083Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-07-08T11:59:05.262088Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1006, publications: 3, subscribers: 0 2025-07-08T11:59:05.262092Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2025-07-08T11:59:05.262095Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-07-08T11:59:05.262098Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 5], 18446744073709551615 2025-07-08T11:59:05.262187Z node 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 
2025-07-08T11:59:05.262198Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1006 2025-07-08T11:59:05.262203Z node 49 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1006 2025-07-08T11:59:05.262208Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-07-08T11:59:05.262212Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-07-08T11:59:05.262296Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T11:59:05.262303Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-07-08T11:59:05.262313Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-07-08T11:59:05.262371Z node 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 1006 2025-07-08T11:59:05.262379Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 1006 2025-07-08T11:59:05.262383Z node 49 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1006 2025-07-08T11:59:05.262387Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2025-07-08T11:59:05.262390Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T11:59:05.262432Z node 49 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1006 2025-07-08T11:59:05.262440Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1006 2025-07-08T11:59:05.262443Z node 49 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1006 2025-07-08T11:59:05.262449Z node 49 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-07-08T11:59:05.262452Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T11:59:05.262476Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1006, subscribers: 0 2025-07-08T11:59:05.263101Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard 
Complete, at schemeshard: 72057594046678944, cookie: 1006 2025-07-08T11:59:05.263416Z node 49 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-07-08T11:59:05.263433Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2025-07-08T11:59:05.263444Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 TestModificationResult got TxId: 1006, wait until txId: 1006 TestWaitNotification wait txId: 1006 2025-07-08T11:59:05.263506Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: send EvNotifyTxCompletion 2025-07-08T11:59:05.263513Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1006 2025-07-08T11:59:05.263586Z node 49 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1006, at schemeshard: 72057594046678944 2025-07-08T11:59:05.263601Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: got EvNotifyTxCompletionResult 2025-07-08T11:59:05.263606Z node 49 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: satisfy waiter [49:445:2436] TestWaitNotification: OK eventTxId 1006 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgramLike [GOOD] Test command err: 2025-07-08T11:59:05.731331Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:59:05.735689Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:59:05.735747Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:59:05.736520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:59:05.736588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:59:05.736628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:59:05.736652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:59:05.736670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:59:05.736693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:59:05.736712Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:59:05.736731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:59:05.736748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:59:05.736767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:59:05.736786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:59:05.736809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:59:05.743070Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:59:05.743299Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:59:05.743312Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:59:05.743342Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:05.743389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:59:05.743406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:59:05.743412Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:59:05.743423Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:59:05.743432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:59:05.743439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:59:05.743444Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:59:05.743468Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:05.743477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:59:05.743484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:59:05.743488Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:59:05.743498Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:59:05.743506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:59:05.743513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:59:05.743518Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:59:05.743527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:59:05.743534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:59:05.743538Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:59:05.743564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:59:05.743571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:59:05.743575Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:59:05.743612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:59:05.743620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:59:05.743624Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:59:05.743638Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:59:05.743645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:59:05.743650Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:59:05.743659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:59:05.743666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:59:05.743673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:59:05.743677Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:59:05.743716Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=11; 2025-07-08T11:59:05.743726Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=5; 2025-07-08T11:59:05.743735Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=5; 2025-07-08T11:59:05.743747Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=8; 2025-07-08T11:59:05.743758Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:59:05.743770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:59:05.743777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:59:05.743783Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:59:05.743795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:59:05.743801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;ev ... 
en=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-07-08T11:59:06.356799Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:303:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:207;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-07-08T11:59:06.356811Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:303:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:88;event=TEvTaskProcessedResult; 2025-07-08T11:59:06.356814Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:303:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=merge.cpp:75;event=DoApply;interval_idx=0; 2025-07-08T11:59:06.356817Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:303:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=6; 2025-07-08T11:59:06.356823Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:303:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=10;merger=0;interval_id=6; 2025-07-08T11:59:06.356826Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:303:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-07-08T11:59:06.356831Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:303:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-07-08T11:59:06.356835Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:303:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=1;count=10;finished=1; 2025-07-08T11:59:06.356838Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:303:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:207;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-07-08T11:59:06.356859Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:303:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T11:59:06.356866Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:303:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:1;records_count:10;schema=message: string;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-07-08T11:59:06.356869Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:303:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-07-08T11:59:06.356874Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:303:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:238;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;);columns=1;rows=10; 2025-07-08T11:59:06.356879Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:303:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:258;stage=data_format;batch_size=61;num_rows=10;batch_columns=message; 2025-07-08T11:59:06.356896Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:303:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:302:2319];bytes=61;rows=10;faults=0;finished=0;fault=0;schema=message: string; 2025-07-08T11:59:06.356903Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:303:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:278;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-07-08T11:59:06.356908Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:303:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-07-08T11:59:06.356914Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:303:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-07-08T11:59:06.356925Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:303:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T11:59:06.356930Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:303:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-07-08T11:59:06.356934Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:303:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-07-08T11:59:06.356937Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:303:2320] finished for tablet 9437184 2025-07-08T11:59:06.356991Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:303:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:425;event=scan_finish;compute_actor_id=[1:302:2319];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish","f_task_result","l_task_result"],"t":0}],"full":{"a":1751975946356049,"name":"_full_task","f":1751975946356049,"d_finished":0,"c":0,"l":1751975946356941,"d":892},"events":[{"name":"bootstrap","f":1751975946356067,"d_finished":168,"c":1,"l":1751975946356235,"d":168},{"a":1751975946356924,"name":"ack","f":1751975946356856,"d_finished":60,"c":1,"l":1751975946356916,"d":77},{"a":1751975946356923,"name":"processing","f":1751975946356304,"d_finished":451,"c":9,"l":1751975946356916,"d":469},{"name":"ProduceResults","f":1751975946356159,"d_finished":165,"c":12,"l":1751975946356935,"d":165},{"a":1751975946356936,"name":"Finish","f":1751975946356936,"d_finished":0,"c":0,"l":1751975946356941,"d":5},{"name":"task_result","f":1751975946356305,"d_finished":381,"c":8,"l":1751975946356843,"d":381}],"id":"9437184::6"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-07-08T11:59:06.357000Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:303:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:302:2319];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-07-08T11:59:06.357035Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:303:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:379;event=scan_finished;compute_actor_id=[1:302:2319];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish","f_task_result","l_task_result"],"t":0}],"full":{"a":1751975946356049,"name":"_full_task","f":1751975946356049,"d_finished":0,"c":0,"l":1751975946357008,"d":959},"events":[{"name":"bootstrap","f":1751975946356067,"d_finished":168,"c":1,"l":1751975946356235,"d":168},{"a":1751975946356924,"name":"ack","f":1751975946356856,"d_finished":60,"c":1,"l":1751975946356916,"d":144},{"a":1751975946356923,"name":"processing","f":1751975946356304,"d_finished":451,"c":9,"l":1751975946356916,"d":536},{"name":"ProduceResults","f":1751975946356159,"d_finished":165,"c":12,"l":1751975946356935,"d":165},{"a":1751975946356936,"name":"Finish","f":1751975946356936,"d_finished":0,"c":0,"l":1751975946357008,"d":72},{"name":"task_result","f":1751975946356305,"d_finished":381,"c":8,"l":1751975946356843,"d":381}],"id":"9437184::6"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-07-08T11:59:06.357044Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:303:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-07-08T11:59:06.356005Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2025-07-08T11:59:06.357049Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:303:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:192;event=scan_aborted;reason=unexpected on destructor; 2025-07-08T11:59:06.357073Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:303:2320];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgram [GOOD] Test command err: 2025-07-08T11:59:05.899779Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:59:05.902857Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:59:05.902900Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:59:05.903403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:59:05.903445Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:59:05.903470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:59:05.903487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:59:05.903499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:59:05.903513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:59:05.903526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:59:05.903543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:59:05.903558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:59:05.903570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:59:05.903582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:59:05.903598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:59:05.908021Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:59:05.908190Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:59:05.908198Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:59:05.908216Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:05.908254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:59:05.908265Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:59:05.908268Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:59:05.908275Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:59:05.908281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:59:05.908285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:59:05.908288Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:59:05.908303Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:05.908308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:59:05.908313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:59:05.908315Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:59:05.908322Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:59:05.908326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:59:05.908331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:59:05.908333Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:59:05.908339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:59:05.908344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:59:05.908347Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:59:05.908362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-07-08T11:59:05.908367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:59:05.908369Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:59:05.908384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:59:05.908390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:59:05.908392Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:59:05.908402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:59:05.908409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:59:05.908413Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:59:05.908421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:59:05.908429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:59:05.908435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:59:05.908438Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:59:05.908463Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=9; 2025-07-08T11:59:05.908469Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4; 2025-07-08T11:59:05.908475Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2025-07-08T11:59:05.908483Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=6; 2025-07-08T11:59:05.908491Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:59:05.908499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:59:05.908504Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:59:05.908507Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:59:05.908516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:59:05.908520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;eve ... d;limit=limits:(bytes=0;chunks=0);; 2025-07-08T11:59:06.494548Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:88;event=TEvTaskProcessedResult; 2025-07-08T11:59:06.494557Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-07-08T11:59:06.494562Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-07-08T11:59:06.494566Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:207;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-07-08T11:59:06.494574Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:88;event=TEvTaskProcessedResult; 2025-07-08T11:59:06.494579Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=fetching.cpp:17;event=apply; 2025-07-08T11:59:06.494584Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=interval.cpp:28;event=fetched;interval_idx=0; 2025-07-08T11:59:06.494590Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=interval.cpp:17;event=start_construct_result;interval_idx=0;interval_id=2;memory=8398003;count=1; 2025-07-08T11:59:06.494632Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=merge.cpp:152;event=DoExecute;interval_idx=0; 2025-07-08T11:59:06.494876Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=source.cpp:52;event=source_ready;intervals_count=1;source_idx=0; 2025-07-08T11:59:06.494889Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-07-08T11:59:06.494894Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-07-08T11:59:06.494898Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:207;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-07-08T11:59:06.494917Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:88;event=TEvTaskProcessedResult; 2025-07-08T11:59:06.494921Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=merge.cpp:75;event=DoApply;interval_idx=0; 2025-07-08T11:59:06.494926Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2; 2025-07-08T11:59:06.494952Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=0;merger=0;interval_id=2; 2025-07-08T11:59:06.494958Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-07-08T11:59:06.494968Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-07-08T11:59:06.494978Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-07-08T11:59:06.495008Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T11:59:06.495018Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-07-08T11:59:06.495028Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-07-08T11:59:06.495034Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:287:2304] finished for tablet 9437184 2025-07-08T11:59:06.495082Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:287:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:425;event=scan_finish;compute_actor_id=[1:286:2303];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1751975946493659,"name":"_full_task","f":1751975946493659,"d_finished":0,"c":0,"l":1751975946495040,"d":1381},"events":[{"name":"bootstrap","f":1751975946493685,"d_finished":305,"c":1,"l":1751975946493990,"d":305},{"a":1751975946495004,"name":"ack","f":1751975946495004,"d_finished":0,"c":0,"l":1751975946495040,"d":36},{"a":1751975946495003,"name":"processing","f":1751975946493998,"d_finished":590,"c":9,"l":1751975946494986,"d":627},{"name":"ProduceResults","f":1751975946493857,"d_finished":205,"c":11,"l":1751975946495031,"d":205},{"a":1751975946495031,"name":"Finish","f":1751975946495031,"d_finished":0,"c":0,"l":1751975946495040,"d":9},{"name":"task_result","f":1751975946494000,"d_finished":576,"c":9,"l":1751975946494986,"d":576}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-07-08T11:59:06.495091Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:286:2303];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-07-08T11:59:06.495130Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:287:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:379;event=scan_finished;compute_actor_id=[1:286:2303];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1751975946493659,"name":"_full_task","f":1751975946493659,"d_finished":0,"c":0,"l":1751975946495097,"d":1438},"events":[{"name":"bootstrap","f":1751975946493685,"d_finished":305,"c":1,"l":1751975946493990,"d":305},{"a":1751975946495004,"name":"ack","f":1751975946495004,"d_finished":0,"c":0,"l":1751975946495097,"d":93},{"a":1751975946495003,"name":"processing","f":1751975946493998,"d_finished":590,"c":9,"l":1751975946494986,"d":684},{"name":"ProduceResults","f":1751975946493857,"d_finished":205,"c":11,"l":1751975946495031,"d":205},{"a":1751975946495031,"name":"Finish","f":1751975946495031,"d_finished":0,"c":0,"l":1751975946495097,"d":66},{"name":"task_result","f":1751975946494000,"d_finished":576,"c":9,"l":1751975946494986,"d":576}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-07-08T11:59:06.495140Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-07-08T11:59:06.493569Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2025-07-08T11:59:06.495156Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:287:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:192;event=scan_aborted;reason=unexpected on destructor; 2025-07-08T11:59:06.495181Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:287:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;; >> TColumnShardTestReadWrite::WriteStandalone >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes [GOOD] >> TExternalTableTestReboots::CreateExternalTableWithReboots [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot >> test.py::test[sampling-yql-14664_deps-default.txt-Results] [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot >> Normalizers::ChunksV0MetaNormalizer >> TErasureTypeTest::TestAllSpecies1of2 [GOOD] >> Backup::ProposeBackup >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot >> TColumnShardTestReadWrite::WriteReadZSTD [GOOD] >> TErasureTypeTest::TestAllSpecies2of2 |64.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} 
ydb/library/yql/tests/sql/hybrid_file/part2/pytest >> test.py::test[window-win_multiaggr_tuple-default.txt-Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes [GOOD] Test command err: 2025-07-08T11:59:05.870848Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:59:05.875228Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:59:05.875286Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:59:05.876067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:59:05.876128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:59:05.876163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:59:05.876187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:59:05.876206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:59:05.876227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:59:05.876245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:59:05.876262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:59:05.876279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:59:05.876297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:59:05.876315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:59:05.876333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:59:05.882789Z node 1 
:TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:59:05.883037Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:59:05.883049Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:59:05.883084Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:05.883126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:59:05.883154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:59:05.883161Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:59:05.883172Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:59:05.883182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:59:05.883190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:59:05.883195Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:59:05.883220Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:05.883230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:59:05.883238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:59:05.883243Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:59:05.883253Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:59:05.883260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:59:05.883269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:59:05.883273Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:59:05.883283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:59:05.883290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:59:05.883297Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:59:05.883355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:59:05.883364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:59:05.883369Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:59:05.883390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:59:05.883399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:59:05.883403Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:59:05.883418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:59:05.883425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:59:05.883430Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:59:05.883439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:59:05.883447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:59:05.883455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:59:05.883459Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:59:05.883496Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=11; 2025-07-08T11:59:05.883513Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=11; 2025-07-08T11:59:05.883522Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=5; 2025-07-08T11:59:05.883535Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=9; 2025-07-08T11:59:05.883546Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:59:05.883575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:59:05.883585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:59:05.883591Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:59:05.883605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:59:05.883612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;e ... [{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"19,19,19,19,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"20,20,20,20,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"21,21,21,21,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"22,22,22,22,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"23,23,23,23,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"24,24,24,24,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"25,25,25,25,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"26,26,26,26,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"27,27,27,27,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"28,28,28,28,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"29,29,29,29,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"30,30,30,30,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"
id":1}]},"p":{"include":0,"pk":"31,31,31,31,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"32,32,32,32,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"33,33,33,33,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"34,34,34,34,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"35,35,35,35,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"36,36,36,36,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"37,37,37,37,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"38,38,38,38,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"39,39,39,39,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"40,40,40,40,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"41,41,41,41,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"42,42,42,42,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"43,43,43,43,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"44,44,44,44,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"45,45,45,45,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"46,46,46,46,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"47,47,47,47,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"48,48,48,48,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"49,49,49,49,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"50,50,50,50,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"51,51,51,51,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"52,52,52,52,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"53,53,53,53,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"54,54,54,54,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":
{"count_include":1},"id":1}]},"p":{"include":0,"pk":"55,55,55,55,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"56,56,56,56,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"57,57,57,57,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"58,58,58,58,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"59,59,59,59,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"60,60,60,60,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"61,61,61,61,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"62,62,62,62,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"63,63,63,63,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"64,64,64,64,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"65,65,65,65,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"66,66,66,66,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"67,67,67,67,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"68,68,68,68,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"69,69,69,69,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"70,70,70,70,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"71,71,71,71,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"72,72,72,72,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"73,73,73,73,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"74,74,74,74,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"75,75,75,75,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"76,76,76,76,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"77,77,77,77,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"78,78,78,78,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}
],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"79,79,79,79,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"80,80,80,80,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"81,81,81,81,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"82,82,82,82,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"83,83,83,83,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"84,84,84,84,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"85,85,85,85,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"86,86,86,86,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"87,87,87,87,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"88,88,88,88,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"89,89,89,89,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"90,90,90,90,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"91,91,91,91,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"92,92,92,92,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"93,93,93,93,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"94,94,94,94,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"95,95,95,95,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"96,96,96,96,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"97,97,97,97,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"98,98,98,98,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"99,99,99,99,"}}]}; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::CreateExternalTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 
72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:127:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:132:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:137:2058] recipient: [1:111:2143] 2025-07-08T11:58:49.461231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:58:49.461259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:49.461265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:58:49.461271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:58:49.461277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:58:49.461281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:58:49.461291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:49.461314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:58:49.461413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:58:49.474542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T11:58:49.474570Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:177:2058] recipient: [1:15:2062] 2025-07-08T11:58:49.479255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:58:49.479314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:58:49.479346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:58:49.480884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:58:49.481102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:58:49.481183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:49.481218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:58:49.482300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:49.482345Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:58:49.482563Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:49.482571Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:49.482591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:58:49.482596Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:49.482600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:58:49.482631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-07-08T11:58:49.483719Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T11:58:49.500490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:58:49.500567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:49.500624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:58:49.500666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:58:49.500674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:49.501535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:49.501570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:58:49.501619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:49.501630Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:58:49.501635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:58:49.501640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:58:49.502098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-07-08T11:58:49.502111Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:58:49.502117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:58:49.502433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:49.502441Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:49.502445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:49.502451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:58:49.502835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:58:49.503198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:58:49.503231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:58:49.503386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:49.503406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:58:49.503411Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:49.503472Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:58:49.503477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:49.503502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:58:49.503510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:58:49.503841Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:49.503846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:49.503886Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2025-07-08T11:58:49.503889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:58:49.503948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:49.503953Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:58:49.503962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:49.503965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:49.503968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:49.503970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:49.503973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:58:49.503977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TO ... 44 to tablet: 72057594046316545 cookie: 0:1004 msg type: 269090816 2025-07-08T11:59:07.315582Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1004, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1004 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1004 at step: 5000005 2025-07-08T11:59:07.315645Z node 68 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:07.315663Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1004 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 292057778285 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:07.315671Z node 68 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateExternalTable TPropose, operationId: 1004:0 HandleReply TEvOperationPlan: step# 5000005 2025-07-08T11:59:07.315691Z node 68 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 128 -> 240 2025-07-08T11:59:07.315713Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-07-08T11:59:07.315721Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-07-08T11:59:07.315728Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 FAKE_COORDINATOR: Erasing txId 1004 2025-07-08T11:59:07.316155Z node 68 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:07.316162Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-07-08T11:59:07.316183Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-07-08T11:59:07.316197Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: 
[OwnerId: 72057594046678944, LocalPathId: 5] 2025-07-08T11:59:07.316204Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T11:59:07.316221Z node 68 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:07.316226Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [68:207:2209], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2025-07-08T11:59:07.316230Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [68:207:2209], at schemeshard: 72057594046678944, txId: 1004, path id: 5 2025-07-08T11:59:07.316233Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [68:207:2209], at schemeshard: 72057594046678944, txId: 1004, path id: 5 2025-07-08T11:59:07.316237Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [68:207:2209], at schemeshard: 72057594046678944, txId: 1004, path id: 3 2025-07-08T11:59:07.316282Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-07-08T11:59:07.316288Z node 68 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 ProgressState 2025-07-08T11:59:07.316299Z node 68 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2025-07-08T11:59:07.316303Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-07-08T11:59:07.316308Z node 68 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2025-07-08T11:59:07.316311Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-07-08T11:59:07.316318Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2025-07-08T11:59:07.316323Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-07-08T11:59:07.316328Z node 68 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2025-07-08T11:59:07.316331Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2025-07-08T11:59:07.316342Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-07-08T11:59:07.316345Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-07-08T11:59:07.316350Z node 68 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 3, subscribers: 1 2025-07-08T11:59:07.316354Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-07-08T11:59:07.316358Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 4], 5 2025-07-08T11:59:07.316361Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 5], 2 2025-07-08T11:59:07.316536Z node 68 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T11:59:07.316548Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 5 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T11:59:07.316552Z node 68 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1004 2025-07-08T11:59:07.316556Z node 68 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 5 2025-07-08T11:59:07.316560Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-07-08T11:59:07.316684Z node 68 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T11:59:07.316694Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T11:59:07.316698Z node 68 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2025-07-08T11:59:07.316701Z node 68 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-07-08T11:59:07.316705Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-07-08T11:59:07.317014Z node 68 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T11:59:07.317027Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T11:59:07.317031Z node 68 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2025-07-08T11:59:07.317037Z node 68 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-07-08T11:59:07.317041Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T11:59:07.317052Z node 68 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 1 2025-07-08T11:59:07.317057Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [68:304:2295] 2025-07-08T11:59:07.317711Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-07-08T11:59:07.317759Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-07-08T11:59:07.318081Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-07-08T11:59:07.318099Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-07-08T11:59:07.318104Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [68:335:2326] TestWaitNotification: OK eventTxId 1002 TestWaitNotification: OK eventTxId 1003 TestWaitNotification: OK eventTxId 1004 2025-07-08T11:59:07.318209Z node 68 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirExternalTable/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:59:07.318240Z node 68 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirExternalTable/ExternalTable" took 40us result status StatusSuccess 2025-07-08T11:59:07.318315Z node 68 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirExternalTable/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "a" Type: "Int32" TypeId: 1 Id: 1 NotNull: true } Columns { Name: "b" Type: "Int32" TypeId: 1 Id: 2 NotNull: true } Content: "" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |64.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... results_accumulator.log} |64.8%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |64.8%| [LD] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |64.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadZSTD [GOOD] Test command err: 2025-07-08T11:59:05.017592Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:59:05.022160Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:59:05.022228Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:59:05.023021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:59:05.023079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:59:05.023120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:59:05.023152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:59:05.023170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:59:05.023193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:59:05.023213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:59:05.023235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:59:05.023253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:59:05.023270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:59:05.023289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:59:05.023313Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:59:05.029839Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:59:05.030069Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:59:05.030081Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:59:05.030112Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:05.030158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:59:05.030173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:59:05.030178Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:59:05.030187Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:59:05.030195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:59:05.030203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:59:05.030207Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:59:05.030243Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:05.030252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:59:05.030259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:59:05.030263Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:59:05.030272Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:59:05.030279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:59:05.030286Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:59:05.030289Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:59:05.030298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:59:05.030305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:59:05.030308Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:59:05.030333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:59:05.030340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:59:05.030344Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:59:05.030367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:59:05.030374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:59:05.030378Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:59:05.030390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:59:05.030396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:59:05.030400Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:59:05.030407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:59:05.030415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:59:05.030421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:59:05.030425Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:59:05.030466Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=10; 2025-07-08T11:59:05.030477Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=7; 2025-07-08T11:59:05.030486Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=6; 2025-07-08T11:59:05.030500Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=9; 2025-07-08T11:59:05.030510Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:59:05.030522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:59:05.030529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:59:05.030534Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:59:05.030548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:59:05.030553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;ev ... 
ge=start;iterator=ready_results:(count:1;records_count:31;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:59:08.284870Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-07-08T11:59:08.284882Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:238;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-07-08T11:59:08.284893Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:258;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-07-08T11:59:08.284934Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:969:2829];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-07-08T11:59:08.284962Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:278;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:59:08.284974Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:59:08.284985Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:59:08.285012Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T11:59:08.285021Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:59:08.285030Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:59:08.285035Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:970:2830] finished for tablet 9437184 2025-07-08T11:59:08.285089Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:425;event=scan_finish;compute_actor_id=[1:969:2829];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1751975948283118,"name":"_full_task","f":1751975948283118,"d_finished":0,"c":0,"l":1751975948285042,"d":1924},"events":[{"name":"bootstrap","f":1751975948283152,"d_finished":371,"c":1,"l":1751975948283523,"d":371},{"a":1751975948285010,"name":"ack","f":1751975948284839,"d_finished":149,"c":1,"l":1751975948284988,"d":181},{"a":1751975948285009,"name":"processing","f":1751975948283663,"d_finished":825,"c":10,"l":1751975948284988,"d":858},{"name":"ProduceResults","f":1751975948283372,"d_finished":366,"c":13,"l":1751975948285032,"d":366},{"a":1751975948285033,"name":"Finish","f":1751975948285033,"d_finished":0,"c":0,"l":1751975948285042,"d":9},{"name":"task_result","f":1751975948283667,"d_finished":657,"c":9,"l":1751975948284803,"d":657}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:59:08.285098Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:969:2829];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-07-08T11:59:08.285133Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:379;event=scan_finished;compute_actor_id=[1:969:2829];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1751975948283118,"name":"_full_task","f":1751975948283118,"d_finished":0,"c":0,"l":1751975948285103,"d":1985},"events":[{"name":"bootstrap","f":1751975948283152,"d_finished":371,"c":1,"l":1751975948283523,"d":371},{"a":1751975948285010,"name":"ack","f":1751975948284839,"d_finished":149,"c":1,"l":1751975948284988,"d":242},{"a":1751975948285009,"name":"processing","f":1751975948283663,"d_finished":825,"c":10,"l":1751975948284988,"d":919},{"name":"ProduceResults","f":1751975948283372,"d_finished":366,"c":13,"l":1751975948285032,"d":366},{"a":1751975948285033,"name":"Finish","f":1751975948285033,"d_finished":0,"c":0,"l":1751975948285103,"d":70},{"name":"task_result","f":1751975948283667,"d_finished":657,"c":9,"l":1751975948284803,"d":657}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:59:08.285145Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-07-08T11:59:08.283010Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4512;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4512;selected_rows=0; 2025-07-08T11:59:08.285151Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:192;event=scan_aborted;reason=unexpected on destructor; 2025-07-08T11:59:08.285181Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:970:2830];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> test_watermarks.py::TestWatermarks::test_idle_watermarks[v1-mvp_external_ydb_endpoint0] [GOOD] >> Backup::ProposeBackup [GOOD] >> EvWrite::AbortInTransaction >> Normalizers::ChunksV0MetaNormalizer [GOOD] >> TColumnShardTestReadWrite::WriteStandalone [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::ChunksV0MetaNormalizer [GOOD] Test 
command err: 2025-07-08T11:59:08.185760Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:124:2156];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:59:08.190318Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:124:2156];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:59:08.190376Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:59:08.191130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:59:08.191189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-07-08T11:59:08.191234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:59:08.191254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:59:08.191275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:59:08.191294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:59:08.191313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:59:08.191336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:59:08.191355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:59:08.191373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:59:08.191389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:59:08.191409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:59:08.191427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:59:08.198387Z node 1 :TX_COLUMNSHARD DEBUG: 
TxInitSchema.Complete at tablet 9437184 2025-07-08T11:59:08.198466Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:59:08.198476Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-07-08T11:59:08.198518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=NO_VALUE_OPTIONAL; 2025-07-08T11:59:08.198533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-07-08T11:59:08.198540Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-07-08T11:59:08.198584Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:08.198597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:59:08.198605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:59:08.198609Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-07-08T11:59:08.198618Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:59:08.198626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:59:08.198694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:59:08.198703Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-07-08T11:59:08.198722Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:08.198731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:59:08.198738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:59:08.198742Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-07-08T11:59:08.198752Z 
node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:59:08.198759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:59:08.198766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:59:08.198770Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:59:08.198778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:59:08.198785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:59:08.198791Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:59:08.198799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:59:08.198806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:59:08.198811Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:59:08.198834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:59:08.198842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:59:08.198846Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:59:08.198860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:59:08.198867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:59:08.198871Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:59:08.198878Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:59:08.198885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:59:08.198889Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:59:08.198896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:59:08.198903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:59:08.198907Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:59:08.198920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:59:08.198927Z node 1 :TX_COLUMNSHARD W ... 9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-07-08T11:59:09.098722Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:502:2506];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:207;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-07-08T11:59:09.099156Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:502:2506];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:88;event=TEvTaskProcessedResult; 2025-07-08T11:59:09.099164Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:502:2506];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=merge.cpp:75;event=DoApply;interval_idx=0; 2025-07-08T11:59:09.099168Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:502:2506];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2; 2025-07-08T11:59:09.099178Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:502:2506];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=20048;merger=0;interval_id=2; 2025-07-08T11:59:09.099199Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:502:2506];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-07-08T11:59:09.099208Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:502:2506];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:09.099211Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:502:2506];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=1;count=20048;finished=1; 2025-07-08T11:59:09.099215Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:502:2506];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:207;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-07-08T11:59:09.099251Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:502:2506];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T11:59:09.099267Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:502:2506];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:1;records_count:20048;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:09.099270Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:502:2506];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-07-08T11:59:09.099278Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:502:2506];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:238;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;);columns=3;rows=20048; 2025-07-08T11:59:09.099287Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:502:2506];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:258;stage=data_format;batch_size=2405760;num_rows=20048;batch_columns=key1,key2,field; 2025-07-08T11:59:09.099313Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:502:2506];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:500:2505];bytes=2405760;rows=20048;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2025-07-08T11:59:09.099323Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:502:2506];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:278;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:09.099330Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:502:2506];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:09.099335Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:502:2506];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:09.099821Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:502:2506];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T11:59:09.099844Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:502:2506];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:09.099853Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:502:2506];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:09.099861Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:502:2506] finished for tablet 9437184 2025-07-08T11:59:09.099936Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:502:2506];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:425;event=scan_finish;compute_actor_id=[1:500:2505];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.05}],"full":{"a":1751975949049090,"name":"_full_task","f":1751975949049090,"d_finished":0,"c":0,"l":1751975949099873,"d":50783},"events":[{"name":"bootstrap","f":1751975949049148,"d_finished":432,"c":1,"l":1751975949049580,"d":432},{"a":1751975949099814,"name":"ack","f":1751975949099247,"d_finished":90,"c":1,"l":1751975949099337,"d":149},{"a":1751975949099809,"name":"processing","f":1751975949049595,"d_finished":35078,"c":16,"l":1751975949099337,"d":35142},{"name":"ProduceResults","f":1751975949049420,"d_finished":364,"c":19,"l":1751975949099857,"d":364},{"a":1751975949099858,"name":"Finish","f":1751975949099858,"d_finished":0,"c":0,"l":1751975949099873,"d":15},{"name":"task_result","f":1751975949049598,"d_finished":34938,"c":15,"l":1751975949099223,"d":34938}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:09.099952Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:502:2506];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:500:2505];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-07-08T11:59:09.099990Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:502:2506];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:379;event=scan_finished;compute_actor_id=[1:500:2505];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.05}],"full":{"a":1751975949049090,"name":"_full_task","f":1751975949049090,"d_finished":0,"c":0,"l":1751975949099959,"d":50869},"events":[{"name":"bootstrap","f":1751975949049148,"d_finished":432,"c":1,"l":1751975949049580,"d":432},{"a":1751975949099814,"name":"ack","f":1751975949099247,"d_finished":90,"c":1,"l":1751975949099337,"d":235},{"a":1751975949099809,"name":"processing","f":1751975949049595,"d_finished":35078,"c":16,"l":1751975949099337,"d":35228},{"name":"ProduceResults","f":1751975949049420,"d_finished":364,"c":19,"l":1751975949099857,"d":364},{"a":1751975949099858,"name":"Finish","f":1751975949099858,"d_finished":0,"c":0,"l":1751975949099959,"d":101},{"name":"task_result","f":1751975949049598,"d_finished":34938,"c":15,"l":1751975949099223,"d":34938}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:09.100005Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:502:2506];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-07-08T11:59:09.048966Z;index_granules=0;index_portions=2;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=2776560;inserted_portions_bytes=0;committed_portions_bytes=2488696;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5265256;selected_rows=0; 2025-07-08T11:59:09.100010Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:502:2506];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:192;event=scan_aborted;reason=unexpected on destructor; 2025-07-08T11:59:09.100064Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:502:2506];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; >> EvWrite::AbortInTransaction [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteStandalone [GOOD] Test command err: 2025-07-08T11:59:07.919549Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:59:07.922718Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:59:07.922764Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:59:07.923262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:59:07.923300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:59:07.923322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:59:07.923336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:59:07.923347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:59:07.923360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:59:07.923372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:59:07.923382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 
2025-07-08T11:59:07.923392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:59:07.923403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:59:07.923414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:59:07.923427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:59:07.927556Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:59:07.927716Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:59:07.927726Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:59:07.927749Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:07.927783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:59:07.927793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:59:07.927796Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:59:07.927802Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:59:07.927808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:59:07.927813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:59:07.927816Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:59:07.927831Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:07.927837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:59:07.927841Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:59:07.927844Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:59:07.927849Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:59:07.927854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:59:07.927858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:59:07.927861Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:59:07.927866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:59:07.927870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:59:07.927873Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:59:07.927888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:59:07.927892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:59:07.927895Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:59:07.927914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:59:07.927922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:59:07.927926Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:59:07.927934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:59:07.927939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:59:07.927941Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:59:07.927946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:59:07.927951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:59:07.927955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:59:07.927958Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:59:07.927990Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=11; 2025-07-08T11:59:07.928000Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=5; 2025-07-08T11:59:07.928007Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2025-07-08T11:59:07.928016Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=6; 2025-07-08T11:59:07.928023Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:59:07.928031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:59:07.928036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:59:07.928040Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:59:07.928048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:59:07.928052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;ev ... 
[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"19,19,19,19,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"20,20,20,20,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"21,21,21,21,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"22,22,22,22,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"23,23,23,23,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"24,24,24,24,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"25,25,25,25,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"26,26,26,26,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"27,27,27,27,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"28,28,28,28,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"29,29,29,29,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"30,30,30,30,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"31,31,31,31,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"32,32,32,32,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"33,33,33,33,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"34,34,34,34,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"35,35,35,35,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"36,36,36,36,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"37,37,37,37,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"38,38,38,38,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"39,39,39,39,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"40,40,40,40,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"41,41,41,41,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"42,42,42,42,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1}
,"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"43,43,43,43,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"44,44,44,44,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"45,45,45,45,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"46,46,46,46,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"47,47,47,47,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"48,48,48,48,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"49,49,49,49,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"50,50,50,50,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"51,51,51,51,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"52,52,52,52,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"53,53,53,53,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"54,54,54,54,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"55,55,55,55,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"56,56,56,56,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"57,57,57,57,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"58,58,58,58,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"59,59,59,59,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"60,60,60,60,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"61,61,61,61,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"62,62,62,62,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"63,63,63,63,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"64,64,64,64,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"65,65,65,65,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"66,66,66,66,"}},{"i":{"txs":[],"starts":[{"inc
":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"67,67,67,67,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"68,68,68,68,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"69,69,69,69,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"70,70,70,70,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"71,71,71,71,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"72,72,72,72,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"73,73,73,73,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"74,74,74,74,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"75,75,75,75,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"76,76,76,76,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"77,77,77,77,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"78,78,78,78,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"79,79,79,79,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"80,80,80,80,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"81,81,81,81,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"82,82,82,82,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"83,83,83,83,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"84,84,84,84,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"85,85,85,85,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"86,86,86,86,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"87,87,87,87,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"88,88,88,88,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"89,89,89,89,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"90,90,90,90,"}},{"i":{"tx
s":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"91,91,91,91,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"92,92,92,92,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"93,93,93,93,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"94,94,94,94,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"95,95,95,95,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"96,96,96,96,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"97,97,97,97,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"98,98,98,98,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"99,99,99,99,"}}]}; >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 >> TColumnShardTestReadWrite::WriteReadModifications >> TColumnShardTestReadWrite::WriteReadExoticTypes >> TColumnShardTestReadWrite::CompactionGC >> TSchemeShardMoveTest::Boot >> TSchemeShardMoveTest::Boot [GOOD] >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::AbortInTransaction [GOOD] Test command err: 2025-07-08T11:59:08.192246Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:124:2156];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:59:08.196560Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:124:2156];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:59:08.196611Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:59:08.197304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:59:08.197362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:59:08.197397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:59:08.197423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:59:08.197442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:59:08.197460Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:59:08.197481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:59:08.197499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:59:08.197518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:59:08.197535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:59:08.197554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:59:08.197576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:59:08.203591Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:59:08.203652Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:59:08.203662Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:59:08.203693Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:08.203727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:59:08.203739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:59:08.203744Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:59:08.203753Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:59:08.203761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:59:08.203768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:59:08.203772Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:59:08.203788Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:08.203795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:59:08.203802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:59:08.203805Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:59:08.203814Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:59:08.203821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:59:08.203827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:59:08.203831Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:59:08.203839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:59:08.203846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:59:08.203851Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:59:08.203876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:59:08.203883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:59:08.203887Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:59:08.203907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:59:08.203915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:59:08.203919Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:59:08.203932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:59:08.203938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:59:08.203942Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:59:08.203950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:59:08.203958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:59:08.203964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:59:08.203968Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:59:08.204004Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=10; 2025-07-08T11:59:08.204015Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=6; 2025-07-08T11:59:08.204024Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=5; 2025-07-08T11:59:08.204036Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=7; 2025-07-08T11:59:08.204046Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:59:08.204058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:59:08.204066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:59:08.204072Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:59:08.204085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:59:08.204090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;ev ... 
ts::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=60;data_size=20;sum=240;count=7; 2025-07-08T11:59:09.594019Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[2:108:2140];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:65;memory_size=156;data_size=132;sum=624;count=8;size_of_meta=112; 2025-07-08T11:59:09.594029Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[2:108:2140];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:40;memory_size=228;data_size=204;sum=912;count=4;size_of_portion=184; 2025-07-08T11:59:09.594340Z node 2 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 44 2025-07-08T11:59:09.594408Z node 2 :TX_COLUMNSHARD_WRITE DEBUG: tablet_id=9437184;self_id=[2:108:2140];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:156;event=add_by_insert_id;id=2;operation_id=1; 2025-07-08T11:59:09.605325Z node 2 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 44 2025-07-08T11:59:09.605464Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;self_id=[2:108:2140];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=222;problem=finished; 2025-07-08T11:59:09.605478Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;self_id=[2:108:2140];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=222;problem=finished; 2025-07-08T11:59:09.605505Z node 2 :TX_COLUMNSHARD DEBUG: PlanStep 1751975950075 at tablet 9437184, mediator 0 2025-07-08T11:59:09.605512Z node 2 :TX_COLUMNSHARD DEBUG: TxPlanStep[5] execute at tablet 9437184 2025-07-08T11:59:09.605518Z node 2 :TX_COLUMNSHARD ERROR: TxPlanStep[5] Ignore old txIds [112] for step 1751975950075 last planned step 1751975950075 at tablet 9437184 2025-07-08T11:59:09.605525Z node 2 :TX_COLUMNSHARD DEBUG: TxPlanStep[5] complete at tablet 9437184 2025-07-08T11:59:09.605574Z node 2 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1751975950075:max} readable: {1751975950075:max} at tablet 9437184 2025-07-08T11:59:09.605589Z node 2 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-07-08T11:59:09.606013Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[2:108:2140];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1751975950075:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } } } ; 2025-07-08T11:59:09.606025Z node 2 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[2:108:2140];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1751975950075:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:96;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } } } ; 2025-07-08T11:59:09.606171Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[2:108:2140];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1751975950075:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:44;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":4,"inputs":[{"from":5}]},{"owner_id":5,"inputs":[{"from":6}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"1","p":{"address":{"name":"key","id":1}},"o":"1","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"key","id":1},{"name":"field","id":2}]},"o":"0","t":"ReserveMemory"},"w":0,"id":6},"5":{"p":{"i":"0","p":{"data":[{"name":"key","id":1},{"name":"field","id":2}]},"o":"1,2","t":"FetchOriginalData"},"w":4,"id":5},"4":{"p":{"i":"2","p":{"address":{"name":"field","id":2}},"o":"2","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"1,2","t":"Projection"},"w":18,"id":0}}}; 2025-07-08T11:59:09.606191Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[2:108:2140];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1751975950075:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:142;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-07-08T11:59:09.606301Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[2:108:2140];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1751975950075:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:169;event=TTxScan started;actor_id=[2:175:2192];trace_detailed=; 2025-07-08T11:59:09.606390Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:84;ff_first=(column_ids=1,2;column_names=field,key;);; 2025-07-08T11:59:09.606410Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; 2025-07-08T11:59:09.606453Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:175:2192];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T11:59:09.606464Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:175:2192];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-07-08T11:59:09.606471Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:175:2192];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-07-08T11:59:09.606478Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: Scan [2:175:2192] finished for tablet 9437184 2025-07-08T11:59:09.606526Z node 2 :TX_COLUMNSHARD_SCAN INFO: SelfId=[2:175:2192];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:425;event=scan_finish;compute_actor_id=[2:174:2191];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0}],"full":{"a":1751975949606288,"name":"_full_task","f":1751975949606288,"d_finished":0,"c":0,"l":1751975949606486,"d":198},"events":[{"name":"bootstrap","f":1751975949606321,"d_finished":107,"c":1,"l":1751975949606428,"d":107},{"a":1751975949606449,"name":"ack","f":1751975949606449,"d_finished":0,"c":0,"l":1751975949606486,"d":37},{"a":1751975949606446,"name":"processing","f":1751975949606446,"d_finished":0,"c":0,"l":1751975949606486,"d":40},{"name":"ProduceResults","f":1751975949606425,"d_finished":21,"c":2,"l":1751975949606475,"d":21},{"a":1751975949606475,"name":"Finish","f":1751975949606475,"d_finished":0,"c":0,"l":1751975949606486,"d":11}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-07-08T11:59:09.606538Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:175:2192];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:374;event=send_data;compute_actor_id=[2:174:2191];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-07-08T11:59:09.606569Z node 2 :TX_COLUMNSHARD_SCAN INFO: SelfId=[2:175:2192];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:379;event=scan_finished;compute_actor_id=[2:174:2191];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0}],"full":{"a":1751975949606288,"name":"_full_task","f":1751975949606288,"d_finished":0,"c":0,"l":1751975949606543,"d":255},"events":[{"name":"bootstrap","f":1751975949606321,"d_finished":107,"c":1,"l":1751975949606428,"d":107},{"a":1751975949606449,"name":"ack","f":1751975949606449,"d_finished":0,"c":0,"l":1751975949606543,"d":94},{"a":1751975949606446,"name":"processing","f":1751975949606446,"d_finished":0,"c":0,"l":1751975949606543,"d":97},{"name":"ProduceResults","f":1751975949606425,"d_finished":21,"c":2,"l":1751975949606475,"d":21},{"a":1751975949606475,"name":"Finish","f":1751975949606475,"d_finished":0,"c":0,"l":1751975949606543,"d":68}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-07-08T11:59:09.606603Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[2:175:2192];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-07-08T11:59:09.606185Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-07-08T11:59:09.606608Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:175:2192];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:192;event=scan_aborted;reason=unexpected on destructor; 2025-07-08T11:59:09.606618Z node 2 :TX_COLUMNSHARD_SCAN INFO: SelfId=[2:175:2192];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=string;records=0;size=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=0;
>> TExternalTableTestReboots::ParallelCreateDrop [GOOD]
>> TSchemeShardMoveTest::AsyncIndexWithSyncInFly [GOOD]
>> TxUsage::Sinks_Oltp_WriteToTopic_5 [GOOD]
>> TSchemeShardMoveTest::Chain
>> TSchemeShardMoveTest::MoveIndex
>> TColumnShardTestReadWrite::WriteReadModifications [GOOD]
>> TxUsage::WriteToTopic_Demo_13 [GOOD]
>> TxUsage::Sinks_Oltp_WriteToTopics_1
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::ParallelCreateDrop [GOOD]
Test command err:
==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:127:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:132:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:137:2058] recipient: [1:111:2143] 2025-07-08T11:58:52.908365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:58:52.908379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:52.908382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s,
StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:58:52.908386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:58:52.908389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:58:52.908392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:58:52.908397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:52.908409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:58:52.908462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:58:52.916793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T11:58:52.916809Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:177:2058] recipient: [1:15:2062] 2025-07-08T11:58:52.920525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:58:52.920564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:58:52.920590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:58:52.922230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:58:52.922368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:58:52.922476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:52.922510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:58:52.922933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:52.922965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:58:52.923168Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:52.923177Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:52.923199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:58:52.923208Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:52.923213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:58:52.923243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-07-08T11:58:52.924349Z node 1 :HIVE INFO: [72057594037968897] started, primary 
subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T11:58:52.943158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:58:52.943224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:52.943281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:58:52.943329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:58:52.943339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:52.944113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:52.944137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:58:52.944177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:52.944186Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:58:52.944191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:58:52.944195Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:58:52.944620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:52.944632Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:58:52.944636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:58:52.944974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:52.944985Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:52.944990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:52.944996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:58:52.945552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:58:52.945939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 
72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:58:52.945973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:58:52.946145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:52.946169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:58:52.946176Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:52.946252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:58:52.946259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:52.946282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:58:52.946299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:58:52.946697Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:52.946705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:52.946739Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:52.946744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:58:52.946803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:52.946809Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:58:52.946819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:52.946823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:52.946828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:52.946831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:52.946834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:58:52.946839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TO ... 
ce: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1004 at step: 5000005 2025-07-08T11:59:10.346117Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:10.346135Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1004 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 287762810990 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:10.346143Z node 67 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalTable TPropose opId# 1004:0 HandleReply TEvOperationPlan: step# 5000005 2025-07-08T11:59:10.346183Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-07-08T11:59:10.346203Z node 67 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 128 -> 240 2025-07-08T11:59:10.346231Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T11:59:10.346238Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-07-08T11:59:10.346244Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T11:59:10.346345Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-07-08T11:59:10.346582Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 FAKE_COORDINATOR: Erasing txId 1004 2025-07-08T11:59:10.347003Z node 67 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:10.347015Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:10.347048Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-07-08T11:59:10.347060Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T11:59:10.347077Z node 67 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:10.347081Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [67:204:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2025-07-08T11:59:10.347084Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [67:204:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 4 2025-07-08T11:59:10.347086Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [67:204:2206], at schemeshard: 72057594046678944, txId: 1004, path id: 3 2025-07-08T11:59:10.347131Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-07-08T11:59:10.347137Z node 67 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1004:0 
ProgressState 2025-07-08T11:59:10.347148Z node 67 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2025-07-08T11:59:10.347152Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-07-08T11:59:10.347155Z node 67 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2025-07-08T11:59:10.347157Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-07-08T11:59:10.347160Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2025-07-08T11:59:10.347163Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-07-08T11:59:10.347167Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2025-07-08T11:59:10.347171Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2025-07-08T11:59:10.347184Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-07-08T11:59:10.347188Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-07-08T11:59:10.347194Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 3, subscribers: 0 2025-07-08T11:59:10.347212Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-07-08T11:59:10.347214Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-07-08T11:59:10.347216Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-07-08T11:59:10.347283Z node 67 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T11:59:10.347290Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T11:59:10.347293Z node 67 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1004 2025-07-08T11:59:10.347296Z node 67 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-07-08T11:59:10.347299Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-07-08T11:59:10.347343Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T11:59:10.347349Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-07-08T11:59:10.347358Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-07-08T11:59:10.347414Z node 67 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T11:59:10.347421Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T11:59:10.347423Z node 67 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2025-07-08T11:59:10.347425Z node 67 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-07-08T11:59:10.347427Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T11:59:10.347468Z node 67 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T11:59:10.347473Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T11:59:10.347475Z node 67 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2025-07-08T11:59:10.347477Z node 67 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-07-08T11:59:10.347481Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T11:59:10.347490Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2025-07-08T11:59:10.348452Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-07-08T11:59:10.348593Z node 67 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-07-08T11:59:10.348606Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-07-08T11:59:10.349147Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 TestModificationResult got TxId: 1004, wait until txId: 1004 TestWaitNotification wait txId: 1004 2025-07-08T11:59:10.349263Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-07-08T11:59:10.349273Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-07-08T11:59:10.349359Z node 67 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-07-08T11:59:10.349381Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-07-08T11:59:10.349386Z node 67 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [67:390:2381] TestWaitNotification: OK eventTxId 1004 
2025-07-08T11:59:10.349468Z node 67 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DropMe" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:59:10.349503Z node 67 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DropMe" took 46us result status StatusPathDoesNotExist 2025-07-08T11:59:10.349539Z node 67 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DropMe\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/DropMe" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:59:10.088556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:59:10.088585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:10.088591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:59:10.088595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:59:10.088601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:59:10.088606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:59:10.088615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:10.088629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:59:10.088705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:10.102636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:10.102663Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-07-08T11:59:10.106942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:10.107004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:59:10.107037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:59:10.108521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:59:10.108565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:59:10.108664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:10.108861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:59:10.109657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:10.109697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:59:10.109923Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:10.109932Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:10.109949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:59:10.109956Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:10.109962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:59:10.109989Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:59:10.111207Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:59:10.130545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:10.130635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:10.130710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:59:10.130763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:59:10.130773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:10.131688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:10.131716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-07-08T11:59:10.131764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:10.131774Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:59:10.131780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:59:10.131785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:59:10.132147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:10.132156Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:10.132160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:59:10.132482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:10.132491Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:10.132496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:10.132503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:59:10.133136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:59:10.133529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:59:10.133568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:59:10.133750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:10.133772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:10.133783Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:10.133840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:59:10.133848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:10.133878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:59:10.133889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:59:10.134262Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:10.134269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:10.134314Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:10.134320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:59:10.134330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:10.134336Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:59:10.134347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:10.134352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:10.134357Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:10.134360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:10.134365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:59:10.134370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:10.134376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:59:10.134380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:59:10.134389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:59:10.134395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:59:10.134400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:59:10.134788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:59:10.134801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 103 Step: 5000004 OrderId: 103 ExecLatency: 0 ProposeLatency: 10 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 133 } } 2025-07-08T11:59:10.622487Z node 2 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 103 Step: 5000004 OrderId: 103 ExecLatency: 0 ProposeLatency: 10 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 133 } } FAKE_COORDINATOR: Erasing txId 103 2025-07-08T11:59:10.624462Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 8589936895 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2025-07-08T11:59:10.624475Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409546, partId: 2 2025-07-08T11:59:10.624496Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:2, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 8589936895 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2025-07-08T11:59:10.624508Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:2 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-07-08T11:59:10.624517Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 103:2 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 319 RawX2: 8589936895 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2025-07-08T11:59:10.624529Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:2, shardIdx: 72057594046678944:2, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:10.624533Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:2, at schemeshard: 72057594046678944 2025-07-08T11:59:10.624537Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-07-08T11:59:10.624544Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:2 129 -> 240 2025-07-08T11:59:10.624816Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 321 RawX2: 8589936896 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2025-07-08T11:59:10.624826Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409547, partId: 0 2025-07-08T11:59:10.624837Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 321 RawX2: 8589936896 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2025-07-08T11:59:10.624841Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvDataShard::TEvSchemaChanged at 
tablet: 72057594046678944 2025-07-08T11:59:10.624846Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 321 RawX2: 8589936896 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2025-07-08T11:59:10.624850Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:0, shardIdx: 72057594046678944:1, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:10.624865Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-07-08T11:59:10.624868Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-07-08T11:59:10.624871Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2025-07-08T11:59:10.625633Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:2, at schemeshard: 72057594046678944 2025-07-08T11:59:10.626434Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-07-08T11:59:10.626498Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:2, at schemeshard: 72057594046678944 2025-07-08T11:59:10.626575Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:2, at schemeshard: 72057594046678944 2025-07-08T11:59:10.626586Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 103:2 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:10.626591Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 103:2 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 4], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-07-08T11:59:10.626602Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:2 progress is 2/3 2025-07-08T11:59:10.626607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 2/3 2025-07-08T11:59:10.626611Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:2 progress is 2/3 2025-07-08T11:59:10.626615Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 2/3 2025-07-08T11:59:10.626620Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 2/3, is published: true 2025-07-08T11:59:10.626680Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-07-08T11:59:10.626733Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-07-08T11:59:10.626739Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:10.626743Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 103:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 2], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-07-08T11:59:10.626750Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 3/3 2025-07-08T11:59:10.626753Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2025-07-08T11:59:10.626758Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 3/3 2025-07-08T11:59:10.626761Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2025-07-08T11:59:10.626765Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 3/3, is published: true 2025-07-08T11:59:10.626770Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2025-07-08T11:59:10.626776Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-07-08T11:59:10.626780Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-07-08T11:59:10.626804Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-07-08T11:59:10.626809Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-07-08T11:59:10.626813Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1 2025-07-08T11:59:10.626816Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2025-07-08T11:59:10.626819Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-07-08T11:59:10.626821Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T11:59:10.626823Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:2 2025-07-08T11:59:10.626825Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:2 2025-07-08T11:59:10.626829Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-07-08T11:59:10.626833Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-07-08T11:59:10.626917Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T11:59:10.626921Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-07-08T11:59:10.626929Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-07-08T11:59:10.626933Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-07-08T11:59:10.626936Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-07-08T11:59:10.626939Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-07-08T11:59:10.626942Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:59:10.628379Z node 2 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-07-08T11:59:10.628614Z node 2 :TX_PROXY DEBUG: actor# [2:267:2258] Handle TEvGetProxyServicesRequest TestWaitNotification wait txId: 103 2025-07-08T11:59:10.662199Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-07-08T11:59:10.662221Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-07-08T11:59:10.662293Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-07-08T11:59:10.662308Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-07-08T11:59:10.662313Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:677:2562] TestWaitNotification: OK eventTxId 103
>> TxUsage::WriteToTopic_Demo_14
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadModifications [GOOD]
Test command err:
2025-07-08T11:59:09.926950Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:59:09.929651Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:59:09.929695Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:59:09.930228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:59:09.930270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:59:09.930299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:59:09.930314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:59:09.930325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:59:09.930336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:59:09.930350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:59:09.930360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:59:09.930371Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:59:09.930381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:59:09.930392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:59:09.930403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:59:09.935395Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:59:09.935617Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:59:09.935625Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:59:09.935645Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:09.935692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:59:09.935708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:59:09.935714Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:59:09.935722Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:59:09.935728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:59:09.935733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:59:09.935736Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:59:09.935752Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:09.935757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:59:09.935762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
2025-07-08T11:59:09.935764Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:59:09.935770Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:59:09.935775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:59:09.935779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:59:09.935782Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:59:09.935787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:59:09.935792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:59:09.935795Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:59:09.935811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:59:09.935815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:59:09.935818Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:59:09.935832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:59:09.935836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:59:09.935839Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:59:09.935847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:59:09.935852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:59:09.935854Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:59:09.935859Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:59:09.935864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:59:09.935868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:59:09.935870Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:59:09.935902Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=10; 2025-07-08T11:59:09.935917Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=11; 2025-07-08T11:59:09.935926Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2025-07-08T11:59:09.935936Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=6; 2025-07-08T11:59:09.935943Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:59:09.935951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:59:09.935956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:59:09.935960Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:59:09.935968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:59:09.935972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;e ... 
or.cpp:207;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-07-08T11:59:10.833792Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:387:2404];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:88;event=TEvTaskProcessedResult; 2025-07-08T11:59:10.833796Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:387:2404];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=fetching.cpp:17;event=apply; 2025-07-08T11:59:10.833801Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:387:2404];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=interval.cpp:28;event=fetched;interval_idx=0; 2025-07-08T11:59:10.833809Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:387:2404];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=interval.cpp:17;event=start_construct_result;interval_idx=0;interval_id=6;memory=8394164;count=4; 2025-07-08T11:59:10.833895Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:387:2404];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=merge.cpp:152;event=DoExecute;interval_idx=0; 2025-07-08T11:59:10.833984Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:387:2404];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=source.cpp:52;event=source_ready;intervals_count=1;source_idx=3; 2025-07-08T11:59:10.833993Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:387:2404];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-07-08T11:59:10.833998Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:387:2404];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-07-08T11:59:10.834002Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:387:2404];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:207;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-07-08T11:59:10.834011Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:387:2404];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:88;event=TEvTaskProcessedResult; 2025-07-08T11:59:10.834020Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:387:2404];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-07-08T11:59:10.834024Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:387:2404];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-07-08T11:59:10.834028Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:387:2404];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:207;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-07-08T11:59:10.834047Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:387:2404];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:88;event=TEvTaskProcessedResult; 2025-07-08T11:59:10.834052Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:387:2404];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=merge.cpp:75;event=DoApply;interval_idx=0; 2025-07-08T11:59:10.834058Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:387:2404];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=6; 2025-07-08T11:59:10.834065Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:387:2404];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=0;merger=0;interval_id=6; 2025-07-08T11:59:10.834072Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:387:2404];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-07-08T11:59:10.834081Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:387:2404];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-07-08T11:59:10.834090Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:387:2404];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-07-08T11:59:10.834131Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:387:2404];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T11:59:10.834140Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:387:2404];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-07-08T11:59:10.834148Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:387:2404];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-07-08T11:59:10.834155Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:387:2404] finished for tablet 9437184 2025-07-08T11:59:10.834223Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:387:2404];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:425;event=scan_finish;compute_actor_id=[1:383:2400];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.004}],"full":{"a":1751975950829804,"name":"_full_task","f":1751975950829804,"d_finished":0,"c":0,"l":1751975950834164,"d":4360},"events":[{"name":"bootstrap","f":1751975950829855,"d_finished":733,"c":1,"l":1751975950830588,"d":733},{"a":1751975950834126,"name":"ack","f":1751975950834126,"d_finished":0,"c":0,"l":1751975950834164,"d":38},{"a":1751975950834123,"name":"processing","f":1751975950830757,"d_finished":939,"c":26,"l":1751975950834098,"d":980},{"name":"ProduceResults","f":1751975950830247,"d_finished":468,"c":28,"l":1751975950834150,"d":468},{"a":1751975950834151,"name":"Finish","f":1751975950834151,"d_finished":0,"c":0,"l":1751975950834164,"d":13},{"name":"task_result","f":1751975950830761,"d_finished":888,"c":26,"l":1751975950834098,"d":888}],"id":"9437184::9"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-07-08T11:59:10.834234Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:387:2404];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:383:2400];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-07-08T11:59:10.834269Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:387:2404];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:379;event=scan_finished;compute_actor_id=[1:383:2400];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.004}],"full":{"a":1751975950829804,"name":"_full_task","f":1751975950829804,"d_finished":0,"c":0,"l":1751975950834240,"d":4436},"events":[{"name":"bootstrap","f":1751975950829855,"d_finished":733,"c":1,"l":1751975950830588,"d":733},{"a":1751975950834126,"name":"ack","f":1751975950834126,"d_finished":0,"c":0,"l":1751975950834240,"d":114},{"a":1751975950834123,"name":"processing","f":1751975950830757,"d_finished":939,"c":26,"l":1751975950834098,"d":1056},{"name":"ProduceResults","f":1751975950830247,"d_finished":468,"c":28,"l":1751975950834150,"d":468},{"a":1751975950834151,"name":"Finish","f":1751975950834151,"d_finished":0,"c":0,"l":1751975950834240,"d":89},{"name":"task_result","f":1751975950830761,"d_finished":888,"c":26,"l":1751975950834098,"d":888}],"id":"9437184::9"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-07-08T11:59:10.834281Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:387:2404];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-07-08T11:59:10.829691Z;index_granules=0;index_portions=4;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=9344;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=9344;selected_rows=0; 2025-07-08T11:59:10.834286Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:387:2404];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:192;event=scan_aborted;reason=unexpected on destructor; 2025-07-08T11:59:10.834319Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:387:2404];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; >> TSchemeShardMoveTest::MoveIndex [GOOD] >> TSchemeShardMoveTest::MoveIndexDoesNonExisted >> TSchemeShardMoveTest::ResetCachedPath >> TSchemeShardMoveTest::Chain [GOOD] >> TSchemeShardMoveTest::Index >> TSchemeShardMoveTest::MoveIndexDoesNonExisted [GOOD] >> TColumnShardTestReadWrite::WriteReadExoticTypes [GOOD] >> TSchemeShardMoveTest::Index [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadExoticTypes [GOOD] Test command err: 2025-07-08T11:59:09.872993Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:59:09.875733Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:59:09.875774Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:59:09.876300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:59:09.876338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:59:09.876365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:59:09.876380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:59:09.876392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:59:09.876406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:59:09.876418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:59:09.876429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:59:09.876439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:59:09.876450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:59:09.876465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:59:09.876485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:59:09.880824Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:59:09.880992Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:59:09.881003Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:59:09.881029Z 
node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:09.881064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:59:09.881074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:59:09.881078Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:59:09.881084Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:59:09.881089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:59:09.881094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:59:09.881097Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:59:09.881111Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:09.881116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:59:09.881121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:59:09.881124Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:59:09.881130Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:59:09.881134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:59:09.881139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:59:09.881141Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:59:09.881146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:59:09.881151Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:59:09.881153Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:59:09.881168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:59:09.881172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:59:09.881175Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:59:09.881188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:59:09.881193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:59:09.881195Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:59:09.881203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:59:09.881208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:59:09.881210Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:59:09.881215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:59:09.881220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:59:09.881229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:59:09.881232Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:59:09.881262Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=11; 2025-07-08T11:59:09.881269Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4; 2025-07-08T11:59:09.881274Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2025-07-08T11:59:09.881282Z node 
1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=5; 2025-07-08T11:59:09.881289Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:59:09.881297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:59:09.881302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:59:09.881305Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:59:09.881313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:59:09.881317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;ev ... ge=start;iterator=ready_results:(count:1;records_count:31;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: binary json_payload: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:59:11.516867Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-07-08T11:59:11.516877Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:238;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-07-08T11:59:11.516888Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:258;stage=data_format;batch_size=2759;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-07-08T11:59:11.516924Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:391:2407];bytes=2759;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: binary json_payload: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary; 2025-07-08T11:59:11.516936Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:278;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:59:11.516965Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:59:11.516977Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:59:11.517004Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T11:59:11.517014Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:59:11.517023Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:59:11.517028Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:392:2408] finished for tablet 9437184 2025-07-08T11:59:11.517076Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:425;event=scan_finish;compute_actor_id=[1:391:2407];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1751975951515260,"name":"_full_task","f":1751975951515260,"d_finished":0,"c":0,"l":1751975951517035,"d":1775},"events":[{"name":"bootstrap","f":1751975951515295,"d_finished":355,"c":1,"l":1751975951515650,"d":355},{"a":1751975951517002,"name":"ack","f":1751975951516837,"d_finished":144,"c":1,"l":1751975951516981,"d":177},{"a":1751975951517000,"name":"processing","f":1751975951515782,"d_finished":772,"c":10,"l":1751975951516981,"d":807},{"name":"ProduceResults","f":1751975951515487,"d_finished":355,"c":13,"l":1751975951517025,"d":355},{"a":1751975951517026,"name":"Finish","f":1751975951517026,"d_finished":0,"c":0,"l":1751975951517035,"d":9},{"name":"task_result","f":1751975951515785,"d_finished":608,"c":9,"l":1751975951516810,"d":608}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:59:11.517086Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:391:2407];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-07-08T11:59:11.517120Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:379;event=scan_finished;compute_actor_id=[1:391:2407];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.001}],"full":{"a":1751975951515260,"name":"_full_task","f":1751975951515260,"d_finished":0,"c":0,"l":1751975951517091,"d":1831},"events":[{"name":"bootstrap","f":1751975951515295,"d_finished":355,"c":1,"l":1751975951515650,"d":355},{"a":1751975951517002,"name":"ack","f":1751975951516837,"d_finished":144,"c":1,"l":1751975951516981,"d":233},{"a":1751975951517000,"name":"processing","f":1751975951515782,"d_finished":772,"c":10,"l":1751975951516981,"d":863},{"name":"ProduceResults","f":1751975951515487,"d_finished":355,"c":13,"l":1751975951517025,"d":355},{"a":1751975951517026,"name":"Finish","f":1751975951517026,"d_finished":0,"c":0,"l":1751975951517091,"d":65},{"name":"task_result","f":1751975951515785,"d_finished":608,"c":9,"l":1751975951516810,"d":608}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-07-08T11:59:11.517130Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-07-08T11:59:11.515146Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7928;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7928;selected_rows=0; 2025-07-08T11:59:11.517137Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:192;event=scan_aborted;reason=unexpected on destructor; 2025-07-08T11:59:11.517165Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:392:2408];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveIndexDoesNonExisted [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 
is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:59:11.054512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:59:11.054551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:11.054557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:59:11.054562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:59:11.054568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:59:11.054570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:59:11.054576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:11.054587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:59:11.054644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:11.065862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:11.065882Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:11.069348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:11.069389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:59:11.069414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:59:11.077834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:59:11.077907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:59:11.077989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:11.078217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:59:11.079227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:11.079288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:59:11.079543Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:11.079555Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:11.079572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:59:11.079578Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:11.079584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 
2025-07-08T11:59:11.079609Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:59:11.081589Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:59:11.098614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:11.098685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:11.098740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:59:11.098788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:59:11.098797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:11.099427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:11.099461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:59:11.099499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:11.099509Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:59:11.099514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:59:11.099518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:59:11.099935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:11.099945Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:11.099950Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:59:11.100270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:11.100280Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:11.100286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:11.100292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:59:11.100872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 
MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:59:11.101255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:59:11.101298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:59:11.101434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:11.101457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:11.101464Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:11.101526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:59:11.101533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:11.101560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:59:11.101572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:59:11.102047Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:11.102056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:11.102100Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:11.102106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:59:11.102118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:11.102123Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:59:11.102133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:11.102136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:11.102141Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:11.102143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:11.102150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:59:11.102156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2025-07-08T11:59:11.102160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:59:11.102163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:59:11.102174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:59:11.102178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:59:11.102182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:59:11.102547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:59:11.102560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... imit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:11.653589Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:59:11.653607Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 20us result status StatusSuccess 2025-07-08T11:59:11.653657Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:11.653698Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Sync" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false 
ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-07-08T11:59:11.653722Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Sync" took 27us result status StatusSuccess 2025-07-08T11:59:11.653842Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Sync" PathDescription { Self { Name: "Sync" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Sync" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 
ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:11.653899Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Async" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-07-08T11:59:11.653915Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Async" took 18us result status StatusSuccess 2025-07-08T11:59:11.653976Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Async" PathDescription { Self { Name: "Async" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 5 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Async" LocalPathId: 5 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } 
Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardMoveTest::ResetCachedPath [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::Index [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:59:10.952654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:59:10.952675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:10.952678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: 
StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:59:10.952682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:59:10.952687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:59:10.952689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:59:10.952695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:10.952707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:59:10.952780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:10.963490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:10.963511Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:10.966488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:10.966525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:59:10.966553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:59:10.967823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:59:10.967885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:59:10.968018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:10.968250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:59:10.969378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:10.969423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:59:10.969702Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:10.969715Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:10.969733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:59:10.969741Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:10.969747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:59:10.969775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:59:10.971094Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:59:10.984817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: 
"pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:10.984902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:10.985007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:59:10.985069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:59:10.985081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:10.985957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:10.985980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:59:10.986032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:10.986043Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:59:10.986048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:59:10.986053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:59:10.986433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:10.986441Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:10.986446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:59:10.986717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:10.986724Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:10.986728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:10.986732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:59:10.987097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:59:10.987445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:59:10.987485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 
72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:59:10.987671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:10.987696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:10.987706Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:10.987766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:59:10.987773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:10.987801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:59:10.987813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:59:10.988240Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:10.988249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:10.988288Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:10.988291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:59:10.988299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:10.988303Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:59:10.988312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:10.988314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:10.988317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:10.988319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:10.988322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:59:10.988325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:10.988328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:59:10.988330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:59:10.988340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:59:10.988344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:59:10.988347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 
72057594046678944, LocalPathId: 1], 3 2025-07-08T11:59:10.988741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:59:10.988755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... rId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:11.822193Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:59:11.822214Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 22us result status StatusSuccess 2025-07-08T11:59:11.822265Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 10 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "TableMove" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:11.822317Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/Sync" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-07-08T11:59:11.822344Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove/Sync" took 28us result status StatusSuccess 2025-07-08T11:59:11.822486Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/Sync" PathDescription { Self { Name: "Sync" PathId: 10 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: 
"" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 11 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 10 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Sync" LocalPathId: 10 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 
TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 10 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:11.822553Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/Async" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-07-08T11:59:11.822575Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove/Async" took 23us result status StatusSuccess 2025-07-08T11:59:11.822662Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/Async" PathDescription { Self { Name: "Async" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 8 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Async" LocalPathId: 8 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true 
BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardMoveTest::TwoTables >> TSchemeShardMoveTest::MoveIndexSameDst >> TSchemeShardMoveTest::MoveMigratedTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::ResetCachedPath [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:59:11.728025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:59:11.728052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:11.728058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:59:11.728062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:59:11.728068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:59:11.728072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:59:11.728080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:11.728093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:59:11.728174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:11.737633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:11.737652Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:11.741551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:11.741607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:59:11.741639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:59:11.747327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:59:11.747387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:59:11.747480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:11.747733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:59:11.748719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:11.748758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:59:11.749020Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:11.749031Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:11.749048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:59:11.749054Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:11.749059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:59:11.749081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:59:11.750419Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:59:11.769213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:11.769297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:11.769368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:59:11.769422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose 
status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:59:11.769433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:11.770317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:11.770353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:59:11.770452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:11.770462Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:59:11.770467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:59:11.770472Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:59:11.771007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:11.771021Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:11.771027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:59:11.771537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:11.771576Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:11.771582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:11.771589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:59:11.772221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:59:11.772678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:59:11.772713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:59:11.772845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:11.772867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 
2025-07-08T11:59:11.772872Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:11.772960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:59:11.772973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:11.772998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:59:11.773006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:59:11.776746Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:11.776757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:11.776798Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:11.776804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:59:11.776815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:11.776822Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:59:11.776850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:11.776855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:11.776859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:11.776863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:11.776871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:59:11.776876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:11.776881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:59:11.776884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:59:11.776898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:59:11.776902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:59:11.776905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:59:11.777409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:59:11.777443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
Id, TxId: 105, tablet: 72075186233409549, partId: 0 2025-07-08T11:59:12.277394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: PREPARED TxId: 105 MinStep: 1 MaxStep: 18446744073709551615 PrepareArriveTime: 154000 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 57 } } 2025-07-08T11:59:12.277410Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId# 105:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: PREPARED TxId: 105 MinStep: 1 MaxStep: 18446744073709551615 PrepareArriveTime: 154000 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 57 } } 2025-07-08T11:59:12.277415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-07-08T11:59:12.277449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409549, shardIdx: 72057594046678944:4, operationId: 105:0, left await: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:12.277457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 3 -> 128 2025-07-08T11:59:12.278008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-07-08T11:59:12.278053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-07-08T11:59:12.278060Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 105:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:12.278073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1 2025-07-08T11:59:12.278150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 1 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:59:12.278479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:105 msg type: 269090816 2025-07-08T11:59:12.278509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 105 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409549 for txId: 105 at step: 5000004 2025-07-08T11:59:12.278631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:12.278650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 
2025-07-08T11:59:12.278658Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 105:0 HandleReply TEvOperationPlan, operationId: 105:0, stepId: 5000004, at schemeshard: 72057594046678944 2025-07-08T11:59:12.278738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 129 2025-07-08T11:59:12.278768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2025-07-08T11:59:12.281384Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:12.281396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T11:59:12.281458Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:12.281464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-07-08T11:59:12.281552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-07-08T11:59:12.281560Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 105:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 105 2025-07-08T11:59:12.281698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-07-08T11:59:12.281712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-07-08T11:59:12.281716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-07-08T11:59:12.281721Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 8 2025-07-08T11:59:12.281727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-07-08T11:59:12.281743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2025-07-08T11:59:12.281871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000004 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 257 } } 2025-07-08T11:59:12.281878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2025-07-08T11:59:12.281894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000004 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 
CpuTimeUsec: 257 } } 2025-07-08T11:59:12.281906Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000004 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 257 } } 2025-07-08T11:59:12.282072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 672 RawX2: 4294969907 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2025-07-08T11:59:12.282079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2025-07-08T11:59:12.282091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Source { RawX1: 672 RawX2: 4294969907 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2025-07-08T11:59:12.282097Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 105:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-07-08T11:59:12.282105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 105:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 672 RawX2: 4294969907 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2025-07-08T11:59:12.282114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 105:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:12.282118Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 105:0, at schemeshard: 72057594046678944 2025-07-08T11:59:12.282126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 105:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-07-08T11:59:12.282132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 129 -> 240 2025-07-08T11:59:12.283757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-07-08T11:59:12.283796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-07-08T11:59:12.284017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-07-08T11:59:12.284045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-07-08T11:59:12.284053Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-07-08T11:59:12.284066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-07-08T11:59:12.284070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-07-08T11:59:12.284075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-07-08T11:59:12.284078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-07-08T11:59:12.284082Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: true 2025-07-08T11:59:12.284097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:339:2318] message: TxId: 105 2025-07-08T11:59:12.284104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-07-08T11:59:12.284109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-07-08T11:59:12.284112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-07-08T11:59:12.284143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-07-08T11:59:12.284546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-07-08T11:59:12.284558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:834:2754] TestWaitNotification: OK eventTxId 105 >> TSchemeShardMoveTest::TwoTables [GOOD] >> TSchemeShardMoveTest::MoveIndexSameDst [GOOD] >> TSchemeShardMoveTest::MoveIntoBuildingIndex >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime [GOOD] >> TKesusTest::TestSessionTimeoutAfterReboot [GOOD] >> TKesusTest::TestSessionStealingSameKey >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 [GOOD] >> TSchemeShardMoveTest::MoveTableForBackup >> TSchemeShardMoveTest::Reject >> TSchemeShardMoveTest::MoveMigratedTable [GOOD] >> TSchemeShardMoveTest::MoveOldTableWithIndex >> TColumnShardTestReadWrite::CompactionInGranule_PKString [GOOD] >> TKesusTest::TestSessionStealingSameKey [GOOD] >> TKesusTest::TestSessionStealingDifferentKey >> TSchemeShardMoveTest::MoveIntoBuildingIndex [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::TwoTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:59:12.645045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:59:12.645072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:12.645077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:59:12.645082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:59:12.645088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:59:12.645092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:59:12.645100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:12.645114Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:59:12.645189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:12.656321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:12.656340Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:12.659127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:12.659165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:59:12.659189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:59:12.660645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:59:12.660685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:59:12.660794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:12.661044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:59:12.661934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:12.661971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:59:12.662189Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:12.662198Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:12.662214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:59:12.662220Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:12.662225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:59:12.662249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:59:12.663628Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:59:12.679461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:12.679523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:12.679572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:59:12.679619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:59:12.679627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:12.680255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:12.680277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:59:12.680309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:12.680316Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:59:12.680320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:59:12.680323Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:59:12.680665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:12.680674Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:12.680678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:59:12.680958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:12.680967Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:12.680973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:12.680980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:59:12.681482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:59:12.681829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:59:12.681856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:59:12.681999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:12.682015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:12.682023Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:12.682079Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:59:12.682085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:12.682107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:59:12.682115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:59:12.682630Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:12.682637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:12.682678Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:12.682682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:59:12.682691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:12.682697Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:59:12.682708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:12.682712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:12.682717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:12.682720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:12.682724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:59:12.682729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:12.682734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:59:12.682737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:59:12.682747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:59:12.682753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:59:12.682756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:59:12.683125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:59:12.683138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
:12.866158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-07-08T11:59:12.866162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-07-08T11:59:12.866209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T11:59:12.866215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-07-08T11:59:12.866226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-07-08T11:59:12.866234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-07-08T11:59:12.866239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T11:59:12.866926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-07-08T11:59:12.866939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:506:2466] 2025-07-08T11:59:12.866977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2025-07-08T11:59:12.867070Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:59:12.867114Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table1" took 52us result status StatusPathDoesNotExist 2025-07-08T11:59:12.867164Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/Table1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-07-08T11:59:12.867219Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:59:12.867265Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove1" took 47us result status 
StatusSuccess 2025-07-08T11:59:12.867347Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove1" PathDescription { Self { Name: "TableMove1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableMove1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:12.867430Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:59:12.867443Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table2" took 15us result status StatusPathDoesNotExist 2025-07-08T11:59:12.867459Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/Table2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-07-08T11:59:12.867495Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:59:12.867507Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove2" took 13us result status StatusSuccess 2025-07-08T11:59:12.867553Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove2" PathDescription { Self { Name: "TableMove2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableMove2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:12.867601Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:59:12.867618Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 19us result status StatusSuccess 2025-07-08T11:59:12.867677Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 13 
SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "TableMove1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "TableMove2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TKesusTest::TestSessionStealingDifferentKey [GOOD] >> TSchemeShardMoveTest::MoveOldTableWithIndex [GOOD] >> TSchemeShardMoveTest::MoveTableForBackup [GOOD] >> TSchemeShardMoveTest::MoveTableWithSequence ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveIntoBuildingIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:59:12.653864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:59:12.653889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:12.653895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:59:12.653899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:59:12.653905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:59:12.653909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:59:12.653918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:12.653930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:59:12.654004Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:12.666969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:12.666991Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:12.675050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:12.675123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:59:12.675156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:59:12.676392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:59:12.676430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:59:12.676519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:12.676642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:59:12.677437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:12.677476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:59:12.677664Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:12.677671Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:12.677683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:59:12.677687Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:12.677692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:59:12.677710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:59:12.678685Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:59:12.699647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:12.699714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:12.699774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:59:12.699824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:59:12.699834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:12.700467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 
72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:12.700493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:59:12.700527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:12.700537Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:59:12.700541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:59:12.700545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:59:12.700838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:12.700846Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:12.700849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:59:12.701160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:12.701169Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:12.701173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:12.701178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:59:12.701656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:59:12.701974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:59:12.702003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:59:12.702154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:12.702176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:12.702185Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:12.702233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:59:12.702238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:12.702260Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:59:12.702271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:59:12.702628Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:12.702635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:12.702674Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:12.702678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:59:12.702685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:12.702689Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:59:12.702696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:12.702698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:12.702701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:12.702703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:12.702705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:59:12.702709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:12.702711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:59:12.702713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:59:12.702721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:59:12.702725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:59:12.702727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:59:12.703021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:59:12.703033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
xColumn: value0, State: Unlocking, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:452:2413], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-07-08T11:59:13.467974Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2025-07-08T11:59:13.467997Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2025-07-08T11:59:13.468051Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2025-07-08T11:59:13.468055Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2025-07-08T11:59:13.468060Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 2025-07-08T11:59:13.468082Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:13.468099Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 8589936742 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:13.468106Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2025-07-08T11:59:13.468111Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2025-07-08T11:59:13.468902Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-07-08T11:59:13.468914Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-07-08T11:59:13.468924Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-07-08T11:59:13.468928Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-07-08T11:59:13.468938Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-07-08T11:59:13.468943Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-07-08T11:59:13.468981Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 
2025-07-08T11:59:13.468992Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:131:2155] message: TxId: 281474976710760 2025-07-08T11:59:13.468998Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-07-08T11:59:13.469002Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2025-07-08T11:59:13.469006Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2025-07-08T11:59:13.469017Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-07-08T11:59:13.473612Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-07-08T11:59:13.473638Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2025-07-08T11:59:13.473651Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 102, txId# 281474976710760 2025-07-08T11:59:13.473678Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:452:2413], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710760 2025-07-08T11:59:13.474050Z node 2 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking 2025-07-08T11:59:13.474072Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:452:2413], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-07-08T11:59:13.474079Z node 2 :BUILD_INDEX 
INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-07-08T11:59:13.474365Z node 2 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done 2025-07-08T11:59:13.474387Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Done, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:452:2413], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-07-08T11:59:13.474391Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-07-08T11:59:13.474410Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-07-08T11:59:13.474416Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:631:2580] TestWaitNotification: OK eventTxId 102 2025-07-08T11:59:13.474529Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:59:13.474579Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 58us result status StatusSuccess 2025-07-08T11:59:13.474708Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "SomeIndex" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableIndexes { Name: "Sync" LocalPathId: 5 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" 
SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime [GOOD] Test command err: 2025-07-08T11:58:53.989846Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:58:53.995829Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:58:53.995913Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:58:53.996705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:58:53.996774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:53.996821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:53.996846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:53.996863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:53.996885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:58:53.996905Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:53.996923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:53.996940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:58:53.996978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.996997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:58:53.997015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:58:54.003110Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:58:54.003283Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:58:54.003296Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:58:54.003338Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:54.003387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:58:54.003400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:58:54.003408Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:58:54.003417Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:58:54.003426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:58:54.003433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:58:54.003437Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:58:54.003454Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:54.003462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:58:54.003468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:58:54.003473Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:58:54.003483Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:58:54.003490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:58:54.003497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:58:54.003502Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:58:54.003510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:58:54.003517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:58:54.003521Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:58:54.003544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:58:54.003551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:58:54.003555Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:58:54.003576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:58:54.003585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:58:54.003589Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:58:54.003603Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:58:54.003611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:58:54.003615Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:58:54.003624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:58:54.003631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:58:54.003639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:58:54.003643Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:58:54.003683Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=9; 2025-07-08T11:58:54.003693Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=6; 2025-07-08T11:58:54.003702Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2025-07-08T11:58:54.003713Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=7; 2025-07-08T11:58:54.003724Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:58:54.003737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:58:54.003745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:58:54.003750Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:58:54.003763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:58:54.003770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;eve ... 
nge:[NO_BLOB:0:8384];;column_id:10;chunk_idx:4;blob_range:[NO_BLOB:0:8384];;column_id:10;chunk_idx:5;blob_range:[NO_BLOB:0:8384];;column_id:10;chunk_idx:6;blob_range:[NO_BLOB:0:8384];;column_id:10;chunk_idx:7;blob_range:[NO_BLOB:0:8384];;column_id:10;chunk_idx:8;blob_range:[NO_BLOB:0:8384];;column_id:10;chunk_idx:9;blob_range:[NO_BLOB:0:8656];;column_id:10;chunk_idx:10;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:11;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:12;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:13;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:14;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:15;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:16;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:17;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:18;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:19;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:20;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:21;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:22;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:23;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:24;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:25;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:26;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:27;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:28;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:29;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:30;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:31;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:32;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:33;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:34;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:35;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:36;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:37;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:38;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:39;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:40;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:41;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:42;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:43;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:44;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:45;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:46;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:47;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:48;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:49;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:50;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:51;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:52;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:53;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:54;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:55;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:56;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:57;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:58;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:59;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:60;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:61;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:62;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:63;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:64;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:65;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:66;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:67;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_
idx:68;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:69;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:70;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:71;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:72;blob_range:[NO_BLOB:0:9424];;;;switched=(portion_id:220;path_id:9438184000001;records_count:75000;schema_version:1;level:1;;column_size:7200040;index_size:0;meta:(()););(portion_id:218;path_id:9438184000001;records_count:75000;schema_version:1;level:2;;column_size:7198464;index_size:0;meta:(()););(portion_id:221;path_id:9438184000001;records_count:75000;schema_version:1;level:1;;column_size:7200040;index_size:0;meta:(()););; 2025-07-08T11:59:12.859167Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;event=on_execution;consumer=GENERAL_COMPACTION;task_id=f6397f1a-5bf211f0-ba6da9a2-998f53fb;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=f6397f1a-5bf211f0-ba6da9a2-998f53fb;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=9437184;parent_id=[1:5742:7736];task_id=f6397f1a-5bf211f0-ba6da9a2-998f53fb;task_class=CS::GENERAL;fline=general_compaction.cpp:140;event=blobs_created;appended=1;switched=3; 2025-07-08T11:59:12.859181Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;event=on_execution;consumer=GENERAL_COMPACTION;task_id=f6397f1a-5bf211f0-ba6da9a2-998f53fb;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=f6397f1a-5bf211f0-ba6da9a2-998f53fb;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=9437184;parent_id=[1:5742:7736];task_id=f6397f1a-5bf211f0-ba6da9a2-998f53fb;task_class=CS::GENERAL;fline=abstract.cpp:13;event=new_stage;stage=Constructed;task_id=f6397f1a-5bf211f0-ba6da9a2-998f53fb; 2025-07-08T11:59:12.859766Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5742:7736];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:52;event=TEvWriteIndex;count=1; 2025-07-08T11:59:12.860759Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5742:7736];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:59;event=TTxWriteDraft; 2025-07-08T11:59:12.860770Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5742:7736];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=WriteDraft;task_id=f6397f1a-5bf211f0-ba6da9a2-998f53fb; 2025-07-08T11:59:12.950037Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: fline=tx_draft.cpp:16;event=draft_completed; 2025-07-08T11:59:12.950084Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=7198464;count=779;actions=__MEMORY,__DEFAULT,;waiting=2;; 2025-07-08T11:59:13.054672Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-07-08T11:59:13.054724Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5742:7736];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=common_level.h:121;from=0,0,0,0,;to=74999,74999,74999,74999,; 2025-07-08T11:59:13.054737Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5742:7736];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=common_level.h:141;itFrom=1;itTo=1;raw=7069450;count=1;packed=7200040; 2025-07-08T11:59:13.054758Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:5742:7736];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:48;memory_size=86;data_size=60;sum=89010;count=1749; 2025-07-08T11:59:13.054768Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5742:7736];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:65;memory_size=182;data_size=172;sum=173010;count=1750;size_of_meta=112; 2025-07-08T11:59:13.054778Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5742:7736];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_portion.cpp:40;memory_size=254;data_size=244;sum=236010;count=875;size_of_portion=184; 2025-07-08T11:59:13.054871Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5742:7736];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Compiled;task_id=f6397f1a-5bf211f0-ba6da9a2-998f53fb; 2025-07-08T11:59:13.054917Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[2] (CS::GENERAL) apply at tablet 9437184 2025-07-08T11:59:13.076978Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5742:7736];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=f6397f1a-5bf211f0-ba6da9a2-998f53fb;fline=abstract.cpp:13;event=new_stage;stage=Written;task_id=f6397f1a-5bf211f0-ba6da9a2-998f53fb; 2025-07-08T11:59:13.077329Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 633 2025-07-08T11:59:13.078195Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=36024048;raw_bytes=35366250;count=5;records=375200} inactive {blob_bytes=105325696;raw_bytes=102327000;count=216;records=1200200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-07-08T11:59:13.190506Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f6397f1a-5bf211f0-ba6da9a2-998f53fb;fline=abstract.cpp:13;event=new_stage;stage=Finished;task_id=f6397f1a-5bf211f0-ba6da9a2-998f53fb; 2025-07-08T11:59:13.190532Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f6397f1a-5bf211f0-ba6da9a2-998f53fb;fline=abstract.cpp:54;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-07-08T11:59:13.190544Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f6397f1a-5bf211f0-ba6da9a2-998f53fb;fline=with_appended.cpp:65;portions=222,;task_id=f6397f1a-5bf211f0-ba6da9a2-998f53fb; 2025-07-08T11:59:13.190658Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f6397f1a-5bf211f0-ba6da9a2-998f53fb;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::f6397f1a-5bf211f0-ba6da9a2-998f53fb; 2025-07-08T11:59:13.190677Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f6397f1a-5bf211f0-ba6da9a2-998f53fb;fline=granule.cpp:97;event=OnCompactionFinished;info=(granule:9438184000001;path_id:9438184000001;size:21623968;portions_count:222;); 2025-07-08T11:59:13.190685Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f6397f1a-5bf211f0-ba6da9a2-998f53fb;tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:13.190704Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f6397f1a-5bf211f0-ba6da9a2-998f53fb;tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=9; 2025-07-08T11:59:13.190716Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;task_id=f6397f1a-5bf211f0-ba6da9a2-998f53fb;tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975637417;tx_id=18446744073709551615;;current_snapshot_ts=1751975935591; 2025-07-08T11:59:13.190726Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f6397f1a-5bf211f0-ba6da9a2-998f53fb;tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:13.190736Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f6397f1a-5bf211f0-ba6da9a2-998f53fb;tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:13.190740Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f6397f1a-5bf211f0-ba6da9a2-998f53fb;tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:13.190779Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f6397f1a-5bf211f0-ba6da9a2-998f53fb;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.894000s; 2025-07-08T11:59:13.190790Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f6397f1a-5bf211f0-ba6da9a2-998f53fb;tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; 2025-07-08T11:59:13.190827Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 633 >> BasicUsage::ReadWithRestarts [GOOD] >> BasicUsage::SessionNotDestroyedWhileCompressionInFlight ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 [GOOD] Test command err: 2025-07-08T11:58:53.525567Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:58:53.528397Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:58:53.528474Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:58:53.529306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:58:53.529371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:53.529418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:53.529449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:53.529472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:53.529495Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:58:53.529515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:53.529533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:53.529550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:58:53.529568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.529586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:58:53.529604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:58:53.534078Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:58:53.534294Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:58:53.534311Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:58:53.534337Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:53.534382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:58:53.534398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:58:53.534404Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:58:53.534413Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:58:53.534424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:58:53.534430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:58:53.534434Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:58:53.534457Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:53.534464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:58:53.534471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:58:53.534474Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:58:53.534483Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:58:53.534489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:58:53.534495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:58:53.534500Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:58:53.534507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:58:53.534514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:58:53.534518Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:58:53.534538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:58:53.534544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:58:53.534548Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:58:53.534566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:58:53.534573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:58:53.534577Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:58:53.534588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:58:53.534594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.534598Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.534606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:58:53.534612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:58:53.534618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:58:53.534622Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:58:53.534662Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=12; 2025-07-08T11:58:53.534672Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=6; 2025-07-08T11:58:53.534680Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2025-07-08T11:58:53.534690Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=6; 2025-07-08T11:58:53.534700Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:58:53.534712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:58:53.534719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:58:53.534724Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:58:53.534736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:58:53.534742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;ev ... 
n_id:10;chunk_idx:3;blob_range:[NO_BLOB:0:8384];;column_id:10;chunk_idx:4;blob_range:[NO_BLOB:0:8384];;column_id:10;chunk_idx:5;blob_range:[NO_BLOB:0:8384];;column_id:10;chunk_idx:6;blob_range:[NO_BLOB:0:8384];;column_id:10;chunk_idx:7;blob_range:[NO_BLOB:0:8384];;column_id:10;chunk_idx:8;blob_range:[NO_BLOB:0:8384];;column_id:10;chunk_idx:9;blob_range:[NO_BLOB:0:8656];;column_id:10;chunk_idx:10;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:11;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:12;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:13;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:14;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:15;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:16;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:17;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:18;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:19;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:20;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:21;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:22;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:23;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:24;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:25;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:26;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:27;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:28;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:29;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:30;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:31;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:32;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:33;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:34;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:35;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:36;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:37;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:38;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:39;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:40;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:41;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:42;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:43;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:44;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:45;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:46;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:47;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:48;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:49;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:50;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:51;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:52;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:53;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:54;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:55;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:56;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:57;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:58;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:59;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:60;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:61;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:62;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:63;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:64;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:65;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:66;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:67;blob_range:[NO_BLOB:0
:9424];;column_id:10;chunk_idx:68;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:69;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:70;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:71;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:72;blob_range:[NO_BLOB:0:9424];;;;switched=(portion_id:220;path_id:9438184000001;records_count:75000;schema_version:1;level:1;;column_size:7504840;index_size:0;meta:(()););(portion_id:218;path_id:9438184000001;records_count:75000;schema_version:1;level:2;;column_size:7503120;index_size:0;meta:(()););(portion_id:221;path_id:9438184000001;records_count:75000;schema_version:1;level:1;;column_size:7504840;index_size:0;meta:(()););; 2025-07-08T11:59:12.642379Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;event=on_execution;consumer=GENERAL_COMPACTION;task_id=f5f3b566-5bf211f0-8f33e8e2-1ce5771;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=f5f3b566-5bf211f0-8f33e8e2-1ce5771;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=9437184;parent_id=[1:5961:7955];task_id=f5f3b566-5bf211f0-8f33e8e2-1ce5771;task_class=CS::GENERAL;fline=general_compaction.cpp:140;event=blobs_created;appended=1;switched=3; 2025-07-08T11:59:12.642393Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;event=on_execution;consumer=GENERAL_COMPACTION;task_id=f5f3b566-5bf211f0-8f33e8e2-1ce5771;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=f5f3b566-5bf211f0-8f33e8e2-1ce5771;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=9437184;parent_id=[1:5961:7955];task_id=f5f3b566-5bf211f0-8f33e8e2-1ce5771;task_class=CS::GENERAL;fline=abstract.cpp:13;event=new_stage;stage=Constructed;task_id=f5f3b566-5bf211f0-8f33e8e2-1ce5771; 2025-07-08T11:59:12.643469Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5961:7955];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:52;event=TEvWriteIndex;count=1; 2025-07-08T11:59:12.644571Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5961:7955];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:59;event=TTxWriteDraft; 2025-07-08T11:59:12.644582Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5961:7955];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=WriteDraft;task_id=f5f3b566-5bf211f0-8f33e8e2-1ce5771; 2025-07-08T11:59:12.727107Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: fline=tx_draft.cpp:16;event=draft_completed; 2025-07-08T11:59:12.727152Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=7503120;count=812;actions=__MEMORY,__DEFAULT,;waiting=2;; 2025-07-08T11:59:12.856214Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-07-08T11:59:12.856257Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5961:7955];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=common_level.h:121;from=0,0,0,0,;to=74999,74999,74999,74999,; 2025-07-08T11:59:12.856266Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5961:7955];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=common_level.h:141;itFrom=1;itTo=1;raw=7369450;count=1;packed=7504840; 2025-07-08T11:59:12.856281Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:5961:7955];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=95658;count=1749; 2025-07-08T11:59:12.856287Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5961:7955];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:65;memory_size=190;data_size=180;sum=179658;count=1750;size_of_meta=112; 2025-07-08T11:59:12.856298Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5961:7955];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_portion.cpp:40;memory_size=262;data_size=252;sum=242658;count=875;size_of_portion=184; 2025-07-08T11:59:12.856384Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5961:7955];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Compiled;task_id=f5f3b566-5bf211f0-8f33e8e2-1ce5771; 2025-07-08T11:59:12.856427Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[2] (CS::GENERAL) apply at tablet 9437184 2025-07-08T11:59:12.878769Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5961:7955];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=f5f3b566-5bf211f0-8f33e8e2-1ce5771;fline=abstract.cpp:13;event=new_stage;stage=Written;task_id=f5f3b566-5bf211f0-8f33e8e2-1ce5771; 2025-07-08T11:59:12.879135Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 666 2025-07-08T11:59:12.880130Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=37548672;raw_bytes=36867050;count=5;records=375200} inactive {blob_bytes=110272840;raw_bytes=107127800;count=216;records=1200200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-07-08T11:59:13.030315Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f5f3b566-5bf211f0-8f33e8e2-1ce5771;fline=abstract.cpp:13;event=new_stage;stage=Finished;task_id=f5f3b566-5bf211f0-8f33e8e2-1ce5771; 2025-07-08T11:59:13.030341Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f5f3b566-5bf211f0-8f33e8e2-1ce5771;fline=abstract.cpp:54;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-07-08T11:59:13.030351Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f5f3b566-5bf211f0-8f33e8e2-1ce5771;fline=with_appended.cpp:65;portions=222,;task_id=f5f3b566-5bf211f0-8f33e8e2-1ce5771; 2025-07-08T11:59:13.030471Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f5f3b566-5bf211f0-8f33e8e2-1ce5771;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::f5f3b566-5bf211f0-8f33e8e2-1ce5771; 2025-07-08T11:59:13.030488Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f5f3b566-5bf211f0-8f33e8e2-1ce5771;fline=granule.cpp:97;event=OnCompactionFinished;info=(granule:9438184000001;path_id:9438184000001;size:22538992;portions_count:222;); 2025-07-08T11:59:13.030495Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f5f3b566-5bf211f0-8f33e8e2-1ce5771;tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:13.030515Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f5f3b566-5bf211f0-8f33e8e2-1ce5771;tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=9; 2025-07-08T11:59:13.030533Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;task_id=f5f3b566-5bf211f0-8f33e8e2-1ce5771;tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975637059;tx_id=18446744073709551615;;current_snapshot_ts=1751975935138; 2025-07-08T11:59:13.030542Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f5f3b566-5bf211f0-8f33e8e2-1ce5771;tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:13.030551Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f5f3b566-5bf211f0-8f33e8e2-1ce5771;tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:13.030556Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f5f3b566-5bf211f0-8f33e8e2-1ce5771;tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:13.030572Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f5f3b566-5bf211f0-8f33e8e2-1ce5771;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.869000s; 2025-07-08T11:59:13.030582Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f5f3b566-5bf211f0-8f33e8e2-1ce5771;tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; 2025-07-08T11:59:13.030628Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 666 |64.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSessionStealingDifferentKey [GOOD] Test command err: 2025-07-08T11:58:23.175194Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:58:23.175227Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:58:23.178903Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:58:23.178931Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:58:23.193436Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:58:23.193600Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:133:2159], cookie=12860469078538932146, session=0, seqNo=0) 2025-07-08T11:58:23.193644Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-07-08T11:58:23.216617Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:133:2159], cookie=12860469078538932146, session=1) 2025-07-08T11:58:23.216799Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=1 from sender=[1:133:2159], cookie=15422232075685752970 2025-07-08T11:58:23.216865Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[1:146:2170], cookie=14390390058704245837) 2025-07-08T11:58:23.216880Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[1:146:2170], cookie=14390390058704245837) 2025-07-08T11:58:23.649146Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:23.665162Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:24.061075Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:24.082308Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Complete 2025-07-08T11:58:24.452684Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:24.465357Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:24.821094Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:24.832479Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:25.213209Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:25.236438Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:25.606872Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:25.625436Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:26.011433Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:26.032072Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:26.438930Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:26.452872Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:26.829294Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:26.842204Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:27.276626Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:27.290469Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:27.672551Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:27.685348Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:28.048848Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:28.065479Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:28.450743Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:28.465352Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:28.833646Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:28.845291Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:29.237130Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:29.253358Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:29.615687Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:29.629281Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:30.025095Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:30.041429Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:30.509156Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:30.525364Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:30.913702Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:30.926890Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:31.341114Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:31.361333Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Complete 2025-07-08T11:58:31.737143Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:31.749287Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:32.119118Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:32.131879Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:32.501158Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:32.517288Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:32.893221Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:32.905260Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:33.305140Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:33.321216Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:33.694053Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:33.705257Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:34.085252Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:34.098219Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:34.479893Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:34.491374Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:34.873157Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:34.885892Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:35.325050Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:35.337298Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:35.719387Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:35.733381Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:36.130652Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:36.145290Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:36.493396Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:36.505781Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:36.864288Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:36.882410Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:37.268715Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:37.279885Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:37.650116Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:37.661052Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:38.015056Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:38.028591Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:38.401087Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:38.418407Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Complete 2025-07-08T11:58:38.786022Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:38.802174Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:39.179305Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:39.191295Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:39.589176Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:39.602118Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:39.997116Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:40.013281Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:40.397266Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:40.416922Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:40.834848Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:40.855893Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:41.281135Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:41.293803Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:41.661535Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:41.689295Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:42.091689Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:42.109267Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:42.503385Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:42.521981Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:42.894102Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:42.905325Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:43.310793Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:43.321813Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:43.685227Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:43.701543Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:44.062779Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:44.074776Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:44.421070Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:44.433212Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:44.801147Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:44.812081Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] ... 
k::Execute 2025-07-08T11:58:56.316305Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:56.673212Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:56.684149Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:57.023465Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:57.034335Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:57.380269Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:57.391266Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:57.738982Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:57.750423Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:58.077593Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:58.088453Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:58.497525Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:58.508481Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:58.858308Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:58.869463Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:59.220452Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:59.231500Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:59.578670Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:59.589674Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:58:59.936541Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:58:59.947439Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:00.303937Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:00.314914Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:00.661993Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:00.672995Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:01.025182Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:01.036456Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:01.384862Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:01.395833Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:01.741922Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:01.752840Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:02.133083Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:02.144142Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:02.484400Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:02.495380Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:02.842458Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Execute 2025-07-08T11:59:02.853381Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:03.200749Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:03.212672Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:03.564345Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:03.577308Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:03.973739Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:03.985416Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:04.341706Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:04.356433Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:04.710224Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:04.721864Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:05.084497Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:05.098126Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:05.444081Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:05.455059Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:05.822963Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:05.834501Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:06.170588Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:06.181513Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:06.521699Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:06.532645Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:06.870136Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:06.881209Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:07.218036Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:07.228965Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:07.571028Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:07.582555Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:07.935311Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:07.946735Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:08.283046Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:08.294058Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:08.623720Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:08.637185Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:08.978218Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:08.989181Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:09.460260Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Execute 2025-07-08T11:59:09.471136Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:09.830824Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:09.842134Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:10.185433Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:10.196535Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:10.540191Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:10.551676Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:10.910082Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:10.921100Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:11.265958Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:11.278411Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:11.651427Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:11.667399Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:12.009208Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:12.020161Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:12.385661Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:12.397701Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:12.754476Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-07-08T11:59:12.766639Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-07-08T11:59:13.108922Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-07-08T11:59:13.108963Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-07-08T11:59:13.123900Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-07-08T11:59:13.134239Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[2:573:2567], cookie=12592124075374541105) 2025-07-08T11:59:13.134280Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[2:573:2567], cookie=12592124075374541105) 2025-07-08T11:59:13.348872Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:59:13.348906Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:59:13.352797Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:59:13.352830Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:59:13.364208Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:59:13.364396Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:133:2159], cookie=12345, session=0, seqNo=0) 2025-07-08T11:59:13.364437Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-07-08T11:59:13.392415Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:133:2159], cookie=12345, session=1) 2025-07-08T11:59:13.392610Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:139:2164], 
cookie=23456, session=1, seqNo=0) 2025-07-08T11:59:13.404888Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:139:2164], cookie=23456, session=1) 2025-07-08T11:59:13.616002Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-07-08T11:59:13.616035Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-07-08T11:59:13.619962Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-07-08T11:59:13.619995Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-07-08T11:59:13.641316Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-07-08T11:59:13.641524Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:133:2159], cookie=12345, session=0, seqNo=0) 2025-07-08T11:59:13.641562Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-07-08T11:59:13.652335Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:133:2159], cookie=12345, session=1) 2025-07-08T11:59:13.652512Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:140:2164], cookie=23456, session=1, seqNo=0) 2025-07-08T11:59:13.663290Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:140:2164], cookie=23456, session=1) >> TSchemeShardMoveTest::Reject [GOOD] >> TSchemeShardMoveTest::OneTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKString [GOOD] Test command err: 2025-07-08T11:58:53.674816Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:58:53.678020Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:58:53.678086Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:58:53.678801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:58:53.678869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:53.678909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:53.678931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:53.678947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:53.678967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:58:53.678997Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:53.679014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:53.679031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:58:53.679048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.679066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:58:53.679085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:58:53.685462Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:58:53.685675Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:58:53.685690Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:58:53.685719Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:53.685778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:58:53.685791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:58:53.685796Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:58:53.685806Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:58:53.685816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:58:53.685822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:58:53.685826Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:58:53.685848Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:53.685857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:58:53.685865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:58:53.685869Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:58:53.685879Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:58:53.685886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:58:53.685893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:58:53.685897Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:58:53.685907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:58:53.685915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:58:53.685919Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:58:53.685943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:58:53.685951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:58:53.685957Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:58:53.685976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:58:53.685983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:58:53.685987Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:58:53.686001Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:58:53.686008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.686012Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.686020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:58:53.686027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:58:53.686034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:58:53.686038Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:58:53.686075Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=9; 2025-07-08T11:58:53.686086Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=6; 2025-07-08T11:58:53.686094Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2025-07-08T11:58:53.686118Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=20; 2025-07-08T11:58:53.686129Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:58:53.686141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:58:53.686148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:58:53.686153Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:58:53.686165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:58:53.686172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;ev ... 
50;count=873;size_of_portion=184; 2025-07-08T11:59:13.050323Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:48;memory_size=110;data_size=62;sum=95396;count=1747; 2025-07-08T11:59:13.050331Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:65;memory_size=206;data_size=174;sum=179300;count=1748;size_of_meta=112; 2025-07-08T11:59:13.050336Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=246;sum=242228;count=874;size_of_portion=184; 2025-07-08T11:59:13.050393Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=5622; 2025-07-08T11:59:13.050404Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=1; 2025-07-08T11:59:13.050529Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=117; 2025-07-08T11:59:13.050539Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=5784; 2025-07-08T11:59:13.050544Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=5794; 2025-07-08T11:59:13.050551Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=1; 2025-07-08T11:59:13.050611Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=55; 2025-07-08T11:59:13.050616Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=5905; 2025-07-08T11:59:13.050642Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=19; 2025-07-08T11:59:13.050658Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=10; 2025-07-08T11:59:13.050713Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=49; 2025-07-08T11:59:13.050748Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=30; 2025-07-08T11:59:13.056134Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=5367; 2025-07-08T11:59:13.062179Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=6007; 2025-07-08T11:59:13.062209Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=3; 2025-07-08T11:59:13.062217Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=2; 2025-07-08T11:59:13.062224Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=2; 2025-07-08T11:59:13.062239Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=10; 2025-07-08T11:59:13.062246Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=0; 2025-07-08T11:59:13.062263Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=13; 2025-07-08T11:59:13.062270Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-07-08T11:59:13.062285Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=7; 2025-07-08T11:59:13.062301Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=10; 2025-07-08T11:59:13.062319Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=11; 2025-07-08T11:59:13.062325Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=18341; 2025-07-08T11:59:13.062368Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted 
{blob_bytes=22744072;raw_bytes=22320020;count=3;records=225200} inactive {blob_bytes=149450960;raw_bytes=145316940;count=221;records=1575200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-07-08T11:59:13.062402Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:6272:8263];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SwitchToWork; 2025-07-08T11:59:13.062412Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:6272:8263];process=SwitchToWork;fline=columnshard.cpp:76;event=initialize_shard;step=SignalTabletActive; 2025-07-08T11:59:13.062427Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:6272:8263];process=SwitchToWork;fline=columnshard_impl.cpp:1327;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-07-08T11:59:13.062446Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:6272:8263];process=SwitchToWork;fline=column_engine_logs.cpp:471;event=OnTieringModified;new_count_tierings=0; 2025-07-08T11:59:13.062496Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:13.062512Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=9; 2025-07-08T11:59:13.062531Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975637450;tx_id=18446744073709551615;;current_snapshot_ts=1751975935288; 2025-07-08T11:59:13.062539Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:13.062550Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:13.062556Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:13.062579Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; 2025-07-08T11:59:13.064692Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:6272:8263];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:248;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-07-08T11:59:13.064884Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:6272:8263];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:237;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-07-08T11:59:13.064891Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-07-08T11:59:13.064895Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-07-08T11:59:13.064901Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:6272:8263];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:13.064921Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:6272:8263];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=9; 2025-07-08T11:59:13.064932Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:6272:8263];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975637450;tx_id=18446744073709551615;;current_snapshot_ts=1751975935288; 2025-07-08T11:59:13.064940Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:6272:8263];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:13.064962Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:6272:8263];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:13.064968Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:6272:8263];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:13.064985Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:6272:8263];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.999000s; 2025-07-08T11:59:13.064994Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:6272:8263];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveOldTableWithIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:59:12.830184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:59:12.830209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:12.830214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:59:12.830219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:59:12.830224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:59:12.830228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:59:12.830236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:12.830249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:59:12.830318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:12.845023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:12.845047Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:12.849523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:12.849585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:59:12.849614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:59:12.851168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:59:12.851214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:59:12.851323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:12.851504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:59:12.852401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:12.852437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:59:12.852625Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:12.852635Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:12.852651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:59:12.852658Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:12.852664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:59:12.852686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:59:12.854049Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:59:12.869170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:12.869246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:12.869311Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:59:12.869367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:59:12.869379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:12.877427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:12.877480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:59:12.877547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:12.877560Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:59:12.877565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:59:12.877570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:59:12.881683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:12.881710Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:12.881718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:59:12.883087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:12.883100Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:12.883106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:12.883113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:59:12.883782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:59:12.884181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:59:12.884214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:59:12.884392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:12.884414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:12.884422Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:12.884470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:59:12.884475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:12.884503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:59:12.884512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:59:12.884961Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:12.884974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:12.885024Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:12.885030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:59:12.885041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:12.885047Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:59:12.885059Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:12.885063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:12.885067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:12.885070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:12.885075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:59:12.885079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:12.885084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:59:12.885088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:59:12.885101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:59:12.885107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:59:12.885110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:59:12.885557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 
2025-07-08T11:59:12.885572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... tency: 11 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 113 } } 2025-07-08T11:59:13.682775Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-07-08T11:59:13.682788Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 11 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 113 } } 2025-07-08T11:59:13.682799Z node 2 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 11 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 113 } } FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 2025-07-08T11:59:13.682997Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 323 RawX2: 8589936899 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-07-08T11:59:13.683005Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 2 2025-07-08T11:59:13.683019Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:2, at schemeshard: 72057594046678944, message: Source { RawX1: 323 RawX2: 8589936899 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-07-08T11:59:13.683026Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:2 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-07-08T11:59:13.683035Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:2 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 323 RawX2: 8589936899 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-07-08T11:59:13.683046Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:2, shardIdx: 72057594046678944:2, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:13.683050Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:2, at schemeshard: 72057594046678944 2025-07-08T11:59:13.683054Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-07-08T11:59:13.683064Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:2 129 -> 240 2025-07-08T11:59:13.683398Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 329 RawX2: 8589936903 
} Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-07-08T11:59:13.683409Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-07-08T11:59:13.683426Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 329 RawX2: 8589936903 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-07-08T11:59:13.683432Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-07-08T11:59:13.683440Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 329 RawX2: 8589936903 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-07-08T11:59:13.683449Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:13.683453Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-07-08T11:59:13.683457Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-07-08T11:59:13.683461Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-07-08T11:59:13.683721Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:2, at schemeshard: 72057594046678944 2025-07-08T11:59:13.684065Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-07-08T11:59:13.684246Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:2, at schemeshard: 72057594046678944 2025-07-08T11:59:13.684342Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2025-07-08T11:59:13.684351Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:13.684357Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 102:2 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 4], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-07-08T11:59:13.684370Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 2/3 2025-07-08T11:59:13.684374Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/3 2025-07-08T11:59:13.684378Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 2/3 2025-07-08T11:59:13.684381Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/3 2025-07-08T11:59:13.684386Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/3, is published: true 2025-07-08T11:59:13.684487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-07-08T11:59:13.684508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 102:0, at schemeshard: 72057594046678944 2025-07-08T11:59:13.684512Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:13.684517Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 102:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 2], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-07-08T11:59:13.684525Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 3/3 2025-07-08T11:59:13.684528Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2025-07-08T11:59:13.684532Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 3/3 2025-07-08T11:59:13.684536Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2025-07-08T11:59:13.684540Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/3, is published: true 2025-07-08T11:59:13.684553Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:377:2345] message: TxId: 102 2025-07-08T11:59:13.684559Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2025-07-08T11:59:13.684565Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-07-08T11:59:13.684569Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-07-08T11:59:13.684595Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-07-08T11:59:13.684599Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-07-08T11:59:13.684603Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-07-08T11:59:13.684607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-07-08T11:59:13.684612Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-07-08T11:59:13.684615Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T11:59:13.684619Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:2 2025-07-08T11:59:13.684622Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:2 2025-07-08T11:59:13.684629Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-07-08T11:59:13.684633Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-07-08T11:59:13.684725Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T11:59:13.684731Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-07-08T11:59:13.684744Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 3] was 1 2025-07-08T11:59:13.684749Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-07-08T11:59:13.684755Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-07-08T11:59:13.684762Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-07-08T11:59:13.684768Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:59:13.685438Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-07-08T11:59:13.685452Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:475:2436] 2025-07-08T11:59:13.685549Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 >> TSchemeShardMoveTest::Replace >> TSchemeShardMoveTest::MoveTableWithSequence [GOOD] |64.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |64.9%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |64.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache >> TExternalTableTestReboots::DropReplacedExternalTableWithReboots [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveTableWithSequence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:59:13.528045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:59:13.528063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:13.528067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:59:13.528070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:59:13.528073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:59:13.528076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:59:13.528081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:13.528091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 
10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:59:13.528143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:13.537233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:13.537249Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:13.539934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:13.539966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:59:13.539992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:59:13.544191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:59:13.544251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:59:13.544350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:13.544622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:59:13.545431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:13.545470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:59:13.545691Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:13.545700Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:13.545717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:59:13.545725Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:13.545730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:59:13.545755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:59:13.547004Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:59:13.562232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:13.562295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:13.562348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:59:13.562390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:59:13.562398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 
72057594046678944 2025-07-08T11:59:13.562967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:13.562996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:59:13.563040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:13.563048Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:59:13.563053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:59:13.563058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:59:13.563482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:13.563493Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:13.563497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:59:13.563814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:13.563834Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:13.563842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:13.563849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:59:13.564245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:59:13.564589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:59:13.564619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:59:13.564739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:13.564757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:13.564762Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:13.564815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 
2025-07-08T11:59:13.564822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:13.564844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:59:13.564852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:59:13.565327Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:13.565339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:13.567722Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:13.567757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:59:13.567802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:13.567811Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:59:13.567830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:13.567834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:13.567840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:13.567844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:13.567848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:59:13.567855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:13.567864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:59:13.567869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:59:13.567892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:59:13.567899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:59:13.567903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:59:13.568537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:59:13.568554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
046678944 2025-07-08T11:59:14.225914Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T11:59:14.225924Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:1 4 -> 240 2025-07-08T11:59:14.226421Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:1, at schemeshard: 72057594046678944 2025-07-08T11:59:14.226464Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:1, at schemeshard: 72057594046678944 2025-07-08T11:59:14.226471Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveSequence TDone, operationId: 102:1 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:14.226476Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveSequence TDone, operationId: 102:1 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 3], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-07-08T11:59:14.226487Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:1 progress is 2/2 2025-07-08T11:59:14.226492Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-07-08T11:59:14.226496Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:1 progress is 2/2 2025-07-08T11:59:14.226499Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-07-08T11:59:14.226504Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/2, is published: true 2025-07-08T11:59:14.226520Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:374:2343] message: TxId: 102 2025-07-08T11:59:14.226526Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-07-08T11:59:14.226533Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-07-08T11:59:14.226537Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-07-08T11:59:14.226563Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-07-08T11:59:14.226567Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-07-08T11:59:14.226572Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-07-08T11:59:14.226574Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-07-08T11:59:14.226581Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-07-08T11:59:14.226585Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-07-08T11:59:14.226641Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T11:59:14.226646Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-07-08T11:59:14.226658Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-07-08T11:59:14.226663Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-07-08T11:59:14.226668Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T11:59:14.227191Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-07-08T11:59:14.227203Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:472:2428] 2025-07-08T11:59:14.227284Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-07-08T11:59:14.227944Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/myseq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:59:14.227991Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/myseq" took 54us result status StatusPathDoesNotExist 2025-07-08T11:59:14.228025Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/myseq\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/Table/myseq" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-07-08T11:59:14.228075Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:59:14.228087Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 13us result status StatusPathDoesNotExist 2025-07-08T11:59:14.228101Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/Table" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 
72057594046678944 2025-07-08T11:59:14.228145Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:59:14.228183Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove" took 38us result status StatusSuccess 2025-07-08T11:59:14.228291Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove" PathDescription { Self { Name: "TableMove" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: true } Table { Name: "TableMove" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 DefaultFromSequence: "myseq" NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false Sequences { Name: "myseq" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 2 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:14.228353Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/myseq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-07-08T11:59:14.228371Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove/myseq" took 19us result status StatusSuccess 2025-07-08T11:59:14.228405Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/myseq" PathDescription { Self { Name: "myseq" PathId: 5 
SchemeshardId: 72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SequenceDescription { Name: "myseq" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 2 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardMoveTest::OneTable [GOOD] >> TBoardSubscriberTest::ReconnectReplica >> TErasureTypeTest::TestAllSpeciesCrcWhole1of2 [GOOD] >> LocalPartition::DescribeBadPartition [GOOD] >> LocalPartition::DiscoveryServiceBadPort >> TBoardSubscriberTest::DropByDisconnect >> TBoardSubscriberTest::ReconnectReplica [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::OneTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:59:13.624739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:59:13.624763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:13.624768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:59:13.624773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:59:13.624778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:59:13.624782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:59:13.624792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:13.624805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-07-08T11:59:13.624874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:13.638145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:13.638162Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:13.642891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:13.642969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:59:13.643001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:59:13.644727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:59:13.644780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:59:13.644890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:13.645115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:59:13.645995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:13.646049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:59:13.646275Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:13.646286Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:13.646304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:59:13.646311Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:13.646317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:59:13.646344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:59:13.647706Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:59:13.666922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:13.667007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:13.667095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:59:13.667149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:59:13.667160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:13.668023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: 
StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:13.668053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:59:13.668109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:13.668118Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:59:13.668122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:59:13.668127Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:59:13.668552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:13.668564Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:13.668570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:59:13.668931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:13.668943Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:13.668966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:13.668973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:59:13.669613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:59:13.670009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:59:13.670049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:59:13.670224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:13.670246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:13.670256Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:13.670320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:59:13.670326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-07-08T11:59:13.670351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:59:13.670362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:59:13.671149Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:13.671158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:13.671200Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:13.671204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:59:13.671213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:13.671219Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:59:13.671230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:13.671234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:13.671238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:13.671241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:13.671245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:59:13.671264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:13.671268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:59:13.671272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:59:13.671282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:59:13.671287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:59:13.671291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:59:13.671669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:59:13.671687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
08T11:59:14.779196Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 108:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-07-08T11:59:14.779201Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 108, done: 0, blocked: 1 2025-07-08T11:59:14.779211Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 108:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 108 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-07-08T11:59:14.779240Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 137 -> 129 2025-07-08T11:59:14.779260Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:59:14.779282Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-07-08T11:59:14.779905Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2025-07-08T11:59:14.779934Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2025-07-08T11:59:14.780188Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:14.780200Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:14.780237Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-07-08T11:59:14.780261Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:14.780266Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2206], at schemeshard: 72057594046678944, txId: 108, path id: 1 2025-07-08T11:59:14.780270Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2206], at schemeshard: 72057594046678944, txId: 108, path id: 4 2025-07-08T11:59:14.780363Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-07-08T11:59:14.780370Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 108:0 ProgressState at tablet: 72057594046678944 2025-07-08T11:59:14.780386Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 108:0, at schemeshard: 72057594046678944 2025-07-08T11:59:14.780391Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 108:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-07-08T11:59:14.780395Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 129 -> 240 2025-07-08T11:59:14.780550Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 23 PathOwnerId: 72057594046678944, cookie: 108 2025-07-08T11:59:14.780562Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 23 PathOwnerId: 72057594046678944, cookie: 108 
2025-07-08T11:59:14.780565Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-07-08T11:59:14.780570Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 23 2025-07-08T11:59:14.780575Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:59:14.780706Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 108 2025-07-08T11:59:14.780718Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 108 2025-07-08T11:59:14.780722Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-07-08T11:59:14.780726Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-07-08T11:59:14.780730Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-07-08T11:59:14.780740Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 0/1, is published: true 2025-07-08T11:59:14.781994Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-07-08T11:59:14.782007Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 108:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:14.782079Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-07-08T11:59:14.782103Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2025-07-08T11:59:14.782108Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-07-08T11:59:14.782113Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2025-07-08T11:59:14.782116Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-07-08T11:59:14.782119Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: true 2025-07-08T11:59:14.782131Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:333:2312] message: TxId: 108 2025-07-08T11:59:14.782137Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-07-08T11:59:14.782141Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 108:0 2025-07-08T11:59:14.782145Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 108:0 2025-07-08T11:59:14.782163Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-07-08T11:59:14.782353Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 
2025-07-08T11:59:14.782750Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2025-07-08T11:59:14.782834Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-07-08T11:59:14.782841Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:832:2790] TestWaitNotification: OK eventTxId 108 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-07-08T11:59:14.782972Z node 2 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-07-08T11:59:14.782981Z node 2 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409547 2025-07-08T11:59:14.796902Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 8589936884 } TabletId: 72075186233409546 State: 4 2025-07-08T11:59:14.796940Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-07-08T11:59:14.799500Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-07-08T11:59:14.799629Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-07-08T11:59:14.800127Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:14.800197Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-07-08T11:59:14.800418Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T11:59:14.800427Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-07-08T11:59:14.800441Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:59:14.805883Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-07-08T11:59:14.805908Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-07-08T11:59:14.805980Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 Deleted tabletId 72075186233409546 2025-07-08T11:59:14.806143Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:59:14.806192Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 60us result status StatusSuccess 2025-07-08T11:59:14.806296Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess 
Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 23 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 23 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 21 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table_reboots/unittest >> TExternalTableTestReboots::DropReplacedExternalTableWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:127:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:132:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:137:2058] recipient: [1:111:2143] 2025-07-08T11:58:52.595961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:58:52.595982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:52.595988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:58:52.595993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:58:52.595998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:58:52.596003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:58:52.596011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:58:52.596029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:58:52.596112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:58:52.608782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T11:58:52.608803Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:177:2058] recipient: [1:15:2062] 2025-07-08T11:58:52.621419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:58:52.621467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:58:52.621491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:58:52.629159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:58:52.629349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:58:52.629435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:52.629463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:58:52.629916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:52.629940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:58:52.630096Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:52.630102Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:52.630117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:58:52.630123Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:52.630127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:58:52.630150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-07-08T11:58:52.631424Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T11:58:52.651946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:58:52.652007Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:52.652062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:58:52.652109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:58:52.652121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:52.652782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:52.652809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:58:52.652849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:52.652858Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:58:52.652863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:58:52.652868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:58:52.653359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:52.653371Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:58:52.653376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:58:52.653712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:52.653722Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:52.653728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:52.653734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:58:52.654266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:58:52.654568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:58:52.654590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 
5000001 2025-07-08T11:58:52.654700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:58:52.654716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:58:52.654721Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:52.654776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:58:52.654780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:58:52.654796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:58:52.654804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:58:52.655087Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:58:52.655092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:58:52.655114Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:58:52.655117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:58:52.655154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:58:52.655158Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:58:52.655165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:52.655167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:52.655170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:58:52.655172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:58:52.655174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:58:52.655177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TO ... 
7-08T11:59:14.532963Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-07-08T11:59:14.532967Z node 83 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2025-07-08T11:59:14.532970Z node 83 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-07-08T11:59:14.532974Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-07-08T11:59:14.532985Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2025-07-08T11:59:14.533465Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1005:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1005 msg type: 269090816 2025-07-08T11:59:14.533489Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1005, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1005 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1005 at step: 5000006 2025-07-08T11:59:14.533758Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:14.533778Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1005 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 356482287726 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:14.533785Z node 83 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalTable TPropose opId# 1005:0 HandleReply TEvOperationPlan: step# 5000006 2025-07-08T11:59:14.533819Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-07-08T11:59:14.533833Z node 83 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1005:0 128 -> 240 2025-07-08T11:59:14.533853Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T11:59:14.533859Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-07-08T11:59:14.533865Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T11:59:14.534048Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-07-08T11:59:14.534089Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 FAKE_COORDINATOR: Erasing txId 1005 2025-07-08T11:59:14.534450Z node 83 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:14.534457Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2025-07-08T11:59:14.534478Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-07-08T11:59:14.534501Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T11:59:14.534517Z node 83 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:14.534521Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [83:207:2209], at schemeshard: 72057594046678944, txId: 1005, path id: 1 2025-07-08T11:59:14.534525Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [83:207:2209], at schemeshard: 72057594046678944, txId: 1005, path id: 4 2025-07-08T11:59:14.534528Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [83:207:2209], at schemeshard: 72057594046678944, txId: 1005, path id: 3 2025-07-08T11:59:14.534574Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2025-07-08T11:59:14.534584Z node 83 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1005:0 ProgressState 2025-07-08T11:59:14.534594Z node 83 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2025-07-08T11:59:14.534597Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-07-08T11:59:14.534602Z node 83 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2025-07-08T11:59:14.534604Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-07-08T11:59:14.534608Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: false 2025-07-08T11:59:14.534613Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-07-08T11:59:14.534617Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2025-07-08T11:59:14.534620Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2025-07-08T11:59:14.534629Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-07-08T11:59:14.534640Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-07-08T11:59:14.534645Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1005, publications: 3, subscribers: 0 2025-07-08T11:59:14.534648Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2025-07-08T11:59:14.534651Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-07-08T11:59:14.534654Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-07-08T11:59:14.534812Z node 83 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-07-08T11:59:14.534825Z node 83 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-07-08T11:59:14.534829Z node 83 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 1005 2025-07-08T11:59:14.534833Z node 83 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-07-08T11:59:14.534837Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-07-08T11:59:14.534927Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T11:59:14.534932Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-07-08T11:59:14.534939Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-07-08T11:59:14.534991Z node 83 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2025-07-08T11:59:14.535006Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1005 2025-07-08T11:59:14.535012Z node 83 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1005 2025-07-08T11:59:14.535015Z node 83 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-07-08T11:59:14.535018Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T11:59:14.535128Z node 83 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1005 2025-07-08T11:59:14.535137Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1005 2025-07-08T11:59:14.535140Z node 83 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1005 2025-07-08T11:59:14.535144Z node 83 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-07-08T11:59:14.535147Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T11:59:14.535155Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1005, subscribers: 0 2025-07-08T11:59:14.535914Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 1005 2025-07-08T11:59:14.535966Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-07-08T11:59:14.535982Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-07-08T11:59:14.536205Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 TestModificationResult got TxId: 1005, wait until txId: 1005 TestWaitNotification wait txId: 1005 2025-07-08T11:59:14.536268Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2025-07-08T11:59:14.536274Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2025-07-08T11:59:14.536328Z node 83 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2025-07-08T11:59:14.536354Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2025-07-08T11:59:14.536358Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [83:413:2404] TestWaitNotification: OK eventTxId 1005 >> TBoardSubscriberTest::SimpleSubscriber >> TBoardSubscriberTest::ManySubscribersManyPublisher >> TBoardSubscriberTest::NotAvailableByShutdown >> TSchemeShardMoveTest::Replace [GOOD] |64.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest |64.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/erasure/ut/unittest >> TErasureTypeTest::TestAllSpeciesCrcWhole1of2 [GOOD] |64.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest |64.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest |64.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ReconnectReplica [GOOD] >> TBoardSubscriberTest::DropByDisconnect [GOOD] >> TBoardSubscriberTest::SimpleSubscriber [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::Replace [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:59:14.544183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:59:14.544210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:14.544216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:59:14.544221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:59:14.544226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 
10000 2025-07-08T11:59:14.544231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:59:14.544239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:14.544254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:59:14.544339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:14.556596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:14.556620Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:14.562471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:14.562535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:59:14.562566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:59:14.564131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:59:14.564182Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:59:14.564281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:14.564430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:59:14.565409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:14.565449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:59:14.565670Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:14.565680Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:14.565698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:59:14.565705Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:14.565711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:59:14.565736Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:59:14.567588Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:59:14.590982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:14.591067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:14.591137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason 
transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:59:14.591191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:59:14.591203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:14.592039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:14.592066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:59:14.592115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:14.592125Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:59:14.592130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:59:14.592136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:59:14.592537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:14.592548Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:14.592554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:59:14.592980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:14.592992Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:14.592998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:14.593004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:59:14.593606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:59:14.593998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:59:14.594037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:59:14.594219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:14.594244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 
Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:14.594254Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:14.594315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:59:14.594323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:14.594352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:59:14.594365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:59:14.594787Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:14.594797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:14.594839Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:14.594844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:59:14.594855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:14.594861Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:59:14.594872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:14.594876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:14.594881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:14.594884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:14.594889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:59:14.594894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:14.594899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:59:14.594902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:59:14.594914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:59:14.594919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:59:14.594923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:59:14.595328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:59:14.595347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... ode 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-07-08T11:59:15.489375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 14] was 1 2025-07-08T11:59:15.489524Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-07-08T11:59:15.489566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T11:59:15.489571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 14], at schemeshard: 72057594046678944 2025-07-08T11:59:15.489582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 1 2025-07-08T11:59:15.489587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 13], at schemeshard: 72057594046678944 2025-07-08T11:59:15.489593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 3 2025-07-08T11:59:15.489657Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409548 2025-07-08T11:59:15.489932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-07-08T11:59:15.489973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 1 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409548 2025-07-08T11:59:15.490395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:15.490432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 2 2025-07-08T11:59:15.495869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-07-08T11:59:15.495902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409546 2025-07-08T11:59:15.496638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-07-08T11:59:15.496669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T11:59:15.496680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 16], at schemeshard: 72057594046678944 2025-07-08T11:59:15.496711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 15] was 1 2025-07-08T11:59:15.496719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 15], at schemeshard: 72057594046678944 2025-07-08T11:59:15.496724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 1 2025-07-08T11:59:15.496728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 12], at schemeshard: 72057594046678944 2025-07-08T11:59:15.496734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:59:15.496794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-07-08T11:59:15.496799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-07-08T11:59:15.496825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-07-08T11:59:15.496832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409548 2025-07-08T11:59:15.497719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted 2025-07-08T11:59:15.497848Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-07-08T11:59:15.497862Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2025-07-08T11:59:15.497870Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 2025-07-08T11:59:15.497995Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Src" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:59:15.498044Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Src" took 67us result status StatusPathDoesNotExist 2025-07-08T11:59:15.498081Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Src\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/Src" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-07-08T11:59:15.498127Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, 
record: Path: "/MyRoot/Dst" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:59:15.498163Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dst" took 37us result status StatusSuccess 2025-07-08T11:59:15.498260Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dst" PathDescription { Self { Name: "Dst" PathId: 22 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000006 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Async" LocalPathId: 23 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableIndexes { Name: "Sync" LocalPathId: 25 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 22 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:15.498341Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:59:15.498356Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 17us result status StatusSuccess 2025-07-08T11:59:15.498401Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: 
StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 28 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 28 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 26 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 22 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000006 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TBoardSubscriberTest::ManySubscribersManyPublisher [GOOD] >> TBoardSubscriberTest::NotAvailableByShutdown [GOOD] |65.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::DropByDisconnect [GOOD] |65.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::SimpleSubscriber [GOOD] |65.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::NotAvailableByShutdown [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 |65.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest |65.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ManySubscribersManyPublisher [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 [GOOD] >> TxUsage::WriteToTopic_Demo_14 [GOOD] >> KqpRm::ManyTasks >> KqpRm::NodesMembershipByExchanger >> KqpRm::SingleTask >> KqpRm::Reduce >> TxUsage::WriteToTopic_Demo_15 >> KqpRm::ResourceBrokerNotEnoughResources >> KqpRm::NotEnoughMemory |65.0%| [TA] $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 [GOOD] Test command err: 2025-07-08T11:58:57.087167Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:58:57.091046Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:58:57.091098Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:58:57.091789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:58:57.091839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:57.091872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:57.091893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:57.091910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:57.091930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:58:57.091946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:57.091964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:57.091981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:58:57.091998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:57.092015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:58:57.092032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:58:57.096941Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:58:57.097119Z node 
1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:58:57.097130Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:58:57.097155Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:57.097192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:58:57.097205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:58:57.097210Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:58:57.097219Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:58:57.097228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:58:57.097234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:58:57.097238Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:58:57.097259Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:57.097267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:58:57.097274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:58:57.097279Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:58:57.097288Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:58:57.097294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:58:57.097301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:58:57.097305Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2025-07-08T11:58:57.097313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:58:57.097319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:58:57.097324Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:58:57.097345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:58:57.097351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:58:57.097356Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:58:57.097375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:58:57.097382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:58:57.097386Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:58:57.097398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:58:57.097404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:58:57.097408Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:58:57.097415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:58:57.097422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:58:57.097429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:58:57.097433Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:58:57.097464Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=8; 2025-07-08T11:58:57.097474Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=5; 2025-07-08T11:58:57.097481Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2025-07-08T11:58:57.097491Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=6; 2025-07-08T11:58:57.097501Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:58:57.097511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:58:57.097518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:58:57.097524Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:58:57.097546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:58:57.097552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;eve ... nge:[NO_BLOB:0:8384];;column_id:10;chunk_idx:4;blob_range:[NO_BLOB:0:8384];;column_id:10;chunk_idx:5;blob_range:[NO_BLOB:0:8384];;column_id:10;chunk_idx:6;blob_range:[NO_BLOB:0:8384];;column_id:10;chunk_idx:7;blob_range:[NO_BLOB:0:8384];;column_id:10;chunk_idx:8;blob_range:[NO_BLOB:0:8384];;column_id:10;chunk_idx:9;blob_range:[NO_BLOB:0:8656];;column_id:10;chunk_idx:10;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:11;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:12;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:13;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:14;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:15;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:16;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:17;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:18;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:19;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:20;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:21;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:22;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:23;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:24;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:25;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:26;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:27;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:28;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:29;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:30;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:31;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:32;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:33;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:34;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:35;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:36;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:37;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:38;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:39;blob
_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:40;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:41;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:42;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:43;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:44;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:45;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:46;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:47;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:48;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:49;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:50;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:51;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:52;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:53;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:54;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:55;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:56;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:57;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:58;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:59;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:60;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:61;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:62;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:63;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:64;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:65;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:66;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:67;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:68;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:69;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:70;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:71;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:72;blob_range:[NO_BLOB:0:9424];;;;switched=(portion_id:220;path_id:9438184000001;records_count:75000;schema_version:1;level:1;;column_size:7200040;index_size:0;meta:(()););(portion_id:218;path_id:9438184000001;records_count:75000;schema_version:1;level:2;;column_size:7198464;index_size:0;meta:(()););(portion_id:221;path_id:9438184000001;records_count:75000;schema_version:1;level:1;;column_size:7200040;index_size:0;meta:(()););; 2025-07-08T11:59:16.269376Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;event=on_execution;consumer=GENERAL_COMPACTION;task_id=f835fe74-5bf211f0-a5c10f6b-2b5603ef;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=f835fe74-5bf211f0-a5c10f6b-2b5603ef;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=9437184;parent_id=[1:5742:7736];task_id=f835fe74-5bf211f0-a5c10f6b-2b5603ef;task_class=CS::GENERAL;fline=general_compaction.cpp:140;event=blobs_created;appended=1;switched=3; 2025-07-08T11:59:16.269390Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;event=on_execution;consumer=GENERAL_COMPACTION;task_id=f835fe74-5bf211f0-a5c10f6b-2b5603ef;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=f835fe74-5bf211f0-a5c10f6b-2b5603ef;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=9437184;parent_id=[1:5742:7736];task_id=f835fe74-5bf211f0-a5c10f6b-2b5603ef;task_class=CS::GENERAL;fline=abstract.cpp:13;event=new_stage;stage=Constructed;task_id=f835fe74-5bf211f0-a5c10f6b-2b5603ef; 2025-07-08T11:59:16.270004Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:5742:7736];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:52;event=TEvWriteIndex;count=1; 2025-07-08T11:59:16.270962Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5742:7736];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:59;event=TTxWriteDraft; 2025-07-08T11:59:16.270973Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5742:7736];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=WriteDraft;task_id=f835fe74-5bf211f0-a5c10f6b-2b5603ef; 2025-07-08T11:59:16.350635Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: fline=tx_draft.cpp:16;event=draft_completed; 2025-07-08T11:59:16.350684Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=7198464;count=779;actions=__MEMORY,__DEFAULT,;waiting=2;; 2025-07-08T11:59:16.499677Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-07-08T11:59:16.499734Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5742:7736];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=common_level.h:121;from=0,0,0,0,;to=74999,74999,74999,74999,; 2025-07-08T11:59:16.499747Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5742:7736];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=common_level.h:141;itFrom=1;itTo=1;raw=7069450;count=1;packed=7200040; 2025-07-08T11:59:16.499767Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5742:7736];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:48;memory_size=86;data_size=60;sum=89010;count=1749; 2025-07-08T11:59:16.499776Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5742:7736];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:65;memory_size=182;data_size=172;sum=173010;count=1750;size_of_meta=112; 2025-07-08T11:59:16.499786Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5742:7736];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_portion.cpp:40;memory_size=254;data_size=244;sum=236010;count=875;size_of_portion=184; 2025-07-08T11:59:16.499872Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5742:7736];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Compiled;task_id=f835fe74-5bf211f0-a5c10f6b-2b5603ef; 2025-07-08T11:59:16.499918Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[2] (CS::GENERAL) apply at tablet 9437184 2025-07-08T11:59:16.521894Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5742:7736];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=f835fe74-5bf211f0-a5c10f6b-2b5603ef;fline=abstract.cpp:13;event=new_stage;stage=Written;task_id=f835fe74-5bf211f0-a5c10f6b-2b5603ef; 2025-07-08T11:59:16.522273Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 633 2025-07-08T11:59:16.523276Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=36024048;raw_bytes=35366250;count=5;records=375200} inactive {blob_bytes=105325696;raw_bytes=102327000;count=216;records=1200200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-07-08T11:59:16.598408Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;task_id=f835fe74-5bf211f0-a5c10f6b-2b5603ef;fline=abstract.cpp:13;event=new_stage;stage=Finished;task_id=f835fe74-5bf211f0-a5c10f6b-2b5603ef; 2025-07-08T11:59:16.598431Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f835fe74-5bf211f0-a5c10f6b-2b5603ef;fline=abstract.cpp:54;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-07-08T11:59:16.598441Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f835fe74-5bf211f0-a5c10f6b-2b5603ef;fline=with_appended.cpp:65;portions=222,;task_id=f835fe74-5bf211f0-a5c10f6b-2b5603ef; 2025-07-08T11:59:16.598550Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f835fe74-5bf211f0-a5c10f6b-2b5603ef;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::f835fe74-5bf211f0-a5c10f6b-2b5603ef; 2025-07-08T11:59:16.598567Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f835fe74-5bf211f0-a5c10f6b-2b5603ef;fline=granule.cpp:97;event=OnCompactionFinished;info=(granule:9438184000001;path_id:9438184000001;size:21623968;portions_count:222;); 2025-07-08T11:59:16.598575Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f835fe74-5bf211f0-a5c10f6b-2b5603ef;tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:16.598594Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f835fe74-5bf211f0-a5c10f6b-2b5603ef;tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=9; 2025-07-08T11:59:16.598607Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f835fe74-5bf211f0-a5c10f6b-2b5603ef;tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975640521;tx_id=18446744073709551615;;current_snapshot_ts=1751975938694; 2025-07-08T11:59:16.598616Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f835fe74-5bf211f0-a5c10f6b-2b5603ef;tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:16.598626Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f835fe74-5bf211f0-a5c10f6b-2b5603ef;tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:16.598632Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f835fe74-5bf211f0-a5c10f6b-2b5603ef;tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:16.598649Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f835fe74-5bf211f0-a5c10f6b-2b5603ef;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.894000s; 2025-07-08T11:59:16.598659Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=f835fe74-5bf211f0-a5c10f6b-2b5603ef;tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; 2025-07-08T11:59:16.598698Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 633 >> KqpRm::NotEnoughExecutionUnits >> KqpRm::SingleTask [GOOD] >> KqpRm::ManyTasks [GOOD] >> KqpRm::ResourceBrokerNotEnoughResources [GOOD] >> Normalizers::SchemaVersionsNormalizer >> KqpRm::SingleSnapshotByExchanger >> KqpRm::Reduce [GOOD] >> TCacheTest::WatchRoot >> KqpRm::NotEnoughMemory [GOOD] >> TCacheTest::Navigate >> TCacheTest::RacyRecreateAndSync >> KqpRm::NotEnoughExecutionUnits [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::ManyTasks 
[GOOD] Test command err: 2025-07-08T11:59:17.362278Z node 2 :TX_PROXY DEBUG: actor# [2:180:2103] Bootstrap 2025-07-08T11:59:17.392792Z node 2 :TX_PROXY DEBUG: actor# [2:180:2103] Become StateWork (SchemeCache [2:187:2106]) 2025-07-08T11:59:17.393039Z node 1 :TX_PROXY DEBUG: actor# [1:179:2153] Bootstrap 2025-07-08T11:59:17.394606Z node 1 :TX_PROXY DEBUG: actor# [1:179:2153] Become StateWork (SchemeCache [1:190:2159]) 2025-07-08T11:59:17.405016Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-07-08T11:59:17.406657Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-07-08T11:59:17.406701Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-07-08T11:59:17.407130Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:59:17.407289Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-07-08T11:59:17.407532Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-07-08T11:59:17.407540Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:498} Handle TEvInterconnect::TEvNodesInfo 2025-07-08T11:59:17.407568Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-07-08T11:59:17.409467Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-07-08T11:59:17.409512Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-07-08T11:59:17.409523Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-07-08T11:59:17.409553Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-07-08T11:59:17.409564Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-07-08T11:59:17.409579Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-07-08T11:59:17.433312Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-07-08T11:59:17.433359Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-07-08T11:59:17.445301Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-07-08T11:59:17.445349Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-07-08T11:59:17.445365Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-07-08T11:59:17.445377Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-07-08T11:59:17.445398Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-07-08T11:59:17.445406Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-07-08T11:59:17.445412Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-07-08T11:59:17.445421Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-07-08T11:59:17.461562Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-07-08T11:59:17.461599Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-07-08T11:59:17.473657Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-07-08T11:59:17.473714Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-07-08T11:59:17.473950Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-07-08T11:59:17.473957Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2174} LoadFinished 2025-07-08T11:59:17.475728Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-07-08T11:59:17.475745Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-07-08T11:59:17.476034Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-07-08T11:59:17.476327Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/43nv/001354/r3tmp/tmpszyHtR/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2025-07-08T11:59:17.476398Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/43nv/001354/r3tmp/tmpszyHtR/pdisk_1.dat 2025-07-08T11:59:17.476406Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/43nv/001354/r3tmp/tmpszyHtR/pdisk_1.dat 2025-07-08T11:59:17.476572Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-07-08T11:59:17.476657Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# 
{VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-07-08T11:59:17.476679Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-07-08T11:59:17.476695Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-07-08T11:59:17.476725Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-07-08T11:59:17.476751Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-07-08T11:59:17.477305Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-07-08T11:59:17.477363Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-07-08T11:59:17.488024Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-07-08T11:59:17.488234Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-07-08T11:59:17.490538Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-07-08T11:59:17.490713Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2747} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/43nv/001354/r3tmp/tmpszyHtR/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-07-08T11:59:17.490848Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/43nv/001354/r3tmp/tmpszyHtR/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/43nv/001354/r3tmp/tmpszyHtR/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 15605651460966073943 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-07-08T11:59:17.491008Z node 2 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2025-07-08T11:59:17.491063Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2025-07-08T11:59:17.491070Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2025-07-08T11:59:17.491116Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2025-07-08T11:59:17.491127Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2025-07-08T11:59:17.491152Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-07-08T11:59:17.491171Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2025-07-08T11:59:17.491176Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2025-07-08T11:59:17.491181Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-07-08T11:59:17.491191Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2025-07-08T11:59:17.491299Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-07-08T11:59:17.491325Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2025-07-08T11:59:17.491331Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2025-07-08T11:59:17.491347Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:418:2306] 2025-07-08T11:59:17.491421Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-07-08T11:59:17.491428Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2025-07-08T11:59:17.491432Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2025-07-08T11:59:17.491440Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:420:2116] 2025-07-08T11:59:17.492197Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2025-07-08T11:59:17.492207Z node 1 :TENANT_POOL DEBUG: 
TDomainTenantPool(dc-1) send status update to [1:407:2302] 2025-07-08T11:59:17.492242Z node 2 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2025-07-08T11:59:17.492247Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [2:408:2112] 2025-07-08T11:59:17.492438Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:418:2306]} 2025-07-08T11:59:17.492450Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-07-08T11:59:17.492458Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2025-07-08T11:59:17.492461Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2025-07-08T11:59:17.492573Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:420:2116]} 2025-07-08T11:59:17.492619Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-07-08T11:59:17.492625Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2025-07-08T11:59:17.492628Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2025-07-08T11:59:17.515202Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-07-08T11:59:17.515225Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-07-08T11:59:17.515230Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-07-08T11:59:17.515236Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SingleTask [GOOD] Test command err: 2025-07-08T11:59:17.355029Z node 2 :TX_PROXY DEBUG: actor# [2:180:2103] Bootstrap 2025-07-08T11:59:17.395714Z node 2 :TX_PROXY DEBUG: actor# [2:180:2103] Become StateWork (SchemeCache [2:187:2106]) 2025-07-08T11:59:17.395851Z node 1 :TX_PROXY DEBUG: actor# [1:179:2153] Bootstrap 2025-07-08T11:59:17.397137Z node 1 :TX_PROXY DEBUG: actor# [1:179:2153] Become StateWork (SchemeCache [1:190:2159]) 2025-07-08T11:59:17.406281Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-07-08T11:59:17.407868Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-07-08T11:59:17.407910Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-07-08T11:59:17.408290Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:59:17.408449Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-07-08T11:59:17.408652Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-07-08T11:59:17.408659Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:498} Handle TEvInterconnect::TEvNodesInfo 2025-07-08T11:59:17.408681Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-07-08T11:59:17.410635Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-07-08T11:59:17.410666Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-07-08T11:59:17.410685Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 
2025-07-08T11:59:17.410714Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-07-08T11:59:17.410725Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-07-08T11:59:17.410740Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-07-08T11:59:17.433662Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-07-08T11:59:17.433709Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-07-08T11:59:17.445302Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-07-08T11:59:17.445349Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-07-08T11:59:17.445365Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-07-08T11:59:17.445377Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-07-08T11:59:17.445398Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-07-08T11:59:17.445408Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-07-08T11:59:17.445414Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-07-08T11:59:17.445422Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-07-08T11:59:17.460348Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-07-08T11:59:17.460404Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-07-08T11:59:17.474937Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-07-08T11:59:17.474987Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-07-08T11:59:17.475184Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-07-08T11:59:17.475190Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2174} LoadFinished 2025-07-08T11:59:17.476909Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console 
connection service started 2025-07-08T11:59:17.476927Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-07-08T11:59:17.477207Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-07-08T11:59:17.477481Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/43nv/001367/r3tmp/tmpcSqDVn/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2025-07-08T11:59:17.477532Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/43nv/001367/r3tmp/tmpcSqDVn/pdisk_1.dat 2025-07-08T11:59:17.477536Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/43nv/001367/r3tmp/tmpcSqDVn/pdisk_1.dat 2025-07-08T11:59:17.477667Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-07-08T11:59:17.477728Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-07-08T11:59:17.477742Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-07-08T11:59:17.477750Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-07-08T11:59:17.477770Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-07-08T11:59:17.477787Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-07-08T11:59:17.478356Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-07-08T11:59:17.478403Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-07-08T11:59:17.490393Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-07-08T11:59:17.490606Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-07-08T11:59:17.493014Z node 2 :BS_PDISK WARN: 
{BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-07-08T11:59:17.493179Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2747} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/43nv/001367/r3tmp/tmpcSqDVn/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-07-08T11:59:17.493320Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/43nv/001367/r3tmp/tmpcSqDVn/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/43nv/001367/r3tmp/tmpcSqDVn/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 16518112254901960366 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-07-08T11:59:17.493460Z node 2 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2025-07-08T11:59:17.493513Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2025-07-08T11:59:17.493521Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2025-07-08T11:59:17.493572Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2025-07-08T11:59:17.493582Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2025-07-08T11:59:17.493603Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-07-08T11:59:17.493621Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2025-07-08T11:59:17.493627Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2025-07-08T11:59:17.493634Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-07-08T11:59:17.493644Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2025-07-08T11:59:17.493752Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-07-08T11:59:17.493762Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2025-07-08T11:59:17.493766Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2025-07-08T11:59:17.493778Z node 1 :LOCAL DEBUG: 
TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:418:2306] 2025-07-08T11:59:17.493852Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-07-08T11:59:17.493861Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2025-07-08T11:59:17.493865Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2025-07-08T11:59:17.493873Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:420:2116] 2025-07-08T11:59:17.494629Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2025-07-08T11:59:17.494642Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:407:2302] 2025-07-08T11:59:17.494673Z node 2 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2025-07-08T11:59:17.494678Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [2:408:2112] 2025-07-08T11:59:17.494853Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:418:2306]} 2025-07-08T11:59:17.494865Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-07-08T11:59:17.494872Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2025-07-08T11:59:17.494875Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2025-07-08T11:59:17.494985Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:420:2116]} 2025-07-08T11:59:17.495030Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-07-08T11:59:17.495036Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2025-07-08T11:59:17.495039Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2025-07-08T11:59:17.517566Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-07-08T11:59:17.517588Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-07-08T11:59:17.517594Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-07-08T11:59:17.517600Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
>> TCacheTest::Navigate [GOOD] >> TCacheTest::PathBelongsToDomain ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::ResourceBrokerNotEnoughResources [GOOD] Test command err: 2025-07-08T11:59:17.601727Z node 2 :TX_PROXY DEBUG: actor# [2:180:2103] Bootstrap 2025-07-08T11:59:17.634603Z node 2 :TX_PROXY DEBUG: actor# [2:180:2103] Become StateWork (SchemeCache [2:187:2106]) 2025-07-08T11:59:17.634786Z node 1 :TX_PROXY DEBUG: actor# [1:179:2153] Bootstrap 2025-07-08T11:59:17.636270Z node 1 :TX_PROXY DEBUG: actor# [1:179:2153] Become StateWork (SchemeCache [1:190:2159]) 2025-07-08T11:59:17.659639Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-07-08T11:59:17.661618Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-07-08T11:59:17.661670Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-07-08T11:59:17.662141Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:59:17.662331Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-07-08T11:59:17.662603Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-07-08T11:59:17.662613Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:498} Handle TEvInterconnect::TEvNodesInfo 2025-07-08T11:59:17.662643Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-07-08T11:59:17.664812Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-07-08T11:59:17.664854Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-07-08T11:59:17.664869Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-07-08T11:59:17.664904Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-07-08T11:59:17.664919Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-07-08T11:59:17.664934Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-07-08T11:59:17.687820Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-07-08T11:59:17.687866Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-07-08T11:59:17.699446Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-07-08T11:59:17.699498Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-07-08T11:59:17.699516Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-07-08T11:59:17.699527Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-07-08T11:59:17.699552Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-07-08T11:59:17.699561Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-07-08T11:59:17.699568Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-07-08T11:59:17.699576Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-07-08T11:59:17.710482Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-07-08T11:59:17.710535Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-07-08T11:59:17.721559Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-07-08T11:59:17.721623Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-07-08T11:59:17.721854Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-07-08T11:59:17.721863Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2174} LoadFinished 2025-07-08T11:59:17.723645Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-07-08T11:59:17.723663Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-07-08T11:59:17.723987Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-07-08T11:59:17.724287Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/43nv/001337/r3tmp/tmpM3eKHN/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2025-07-08T11:59:17.724358Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/43nv/001337/r3tmp/tmpM3eKHN/pdisk_1.dat 2025-07-08T11:59:17.724366Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/43nv/001337/r3tmp/tmpM3eKHN/pdisk_1.dat 2025-07-08T11:59:17.724538Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle 
TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-07-08T11:59:17.724627Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-07-08T11:59:17.724649Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-07-08T11:59:17.724665Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-07-08T11:59:17.724697Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-07-08T11:59:17.724722Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-07-08T11:59:17.727838Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-07-08T11:59:17.727923Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-07-08T11:59:17.738905Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-07-08T11:59:17.739172Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-07-08T11:59:17.742235Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-07-08T11:59:17.742406Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2747} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/43nv/001337/r3tmp/tmpM3eKHN/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-07-08T11:59:17.742633Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/43nv/001337/r3tmp/tmpM3eKHN/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/43nv/001337/r3tmp/tmpM3eKHN/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 14414154444206180681 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-07-08T11:59:17.742829Z node 2 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2025-07-08T11:59:17.742890Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2025-07-08T11:59:17.742897Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2025-07-08T11:59:17.742950Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2025-07-08T11:59:17.742961Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2025-07-08T11:59:17.742988Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-07-08T11:59:17.743007Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2025-07-08T11:59:17.743012Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2025-07-08T11:59:17.743018Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-07-08T11:59:17.743028Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2025-07-08T11:59:17.743149Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-07-08T11:59:17.743159Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2025-07-08T11:59:17.743164Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2025-07-08T11:59:17.743180Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:418:2306] 2025-07-08T11:59:17.743265Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-07-08T11:59:17.743272Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2025-07-08T11:59:17.743276Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2025-07-08T11:59:17.743284Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:420:2116] 2025-07-08T11:59:17.744052Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2025-07-08T11:59:17.744063Z node 1 :TENANT_POOL DEBUG: 
TDomainTenantPool(dc-1) send status update to [1:407:2302] 2025-07-08T11:59:17.744094Z node 2 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2025-07-08T11:59:17.744098Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [2:408:2112] 2025-07-08T11:59:17.744311Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:418:2306]} 2025-07-08T11:59:17.744323Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-07-08T11:59:17.744330Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2025-07-08T11:59:17.744333Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2025-07-08T11:59:17.744447Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:420:2116]} 2025-07-08T11:59:17.744492Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-07-08T11:59:17.744498Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2025-07-08T11:59:17.744500Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2025-07-08T11:59:17.763117Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-07-08T11:59:17.763141Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-07-08T11:59:17.763146Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-07-08T11:59:17.763151Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. >> TCacheTest::RacyRecreateAndSync [GOOD] >> TCacheTest::RacyCreateAndSync >> TCacheTest::WatchRoot [GOOD] >> TCacheTestWithDrops::LookupErrorUponEviction ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::Reduce [GOOD] Test command err: 2025-07-08T11:59:17.436907Z node 2 :TX_PROXY DEBUG: actor# [2:180:2103] Bootstrap 2025-07-08T11:59:17.467577Z node 2 :TX_PROXY DEBUG: actor# [2:180:2103] Become StateWork (SchemeCache [2:187:2106]) 2025-07-08T11:59:17.467749Z node 1 :TX_PROXY DEBUG: actor# [1:179:2153] Bootstrap 2025-07-08T11:59:17.469105Z node 1 :TX_PROXY DEBUG: actor# [1:179:2153] Become StateWork (SchemeCache [1:190:2159]) 2025-07-08T11:59:17.479527Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-07-08T11:59:17.481260Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-07-08T11:59:17.481313Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-07-08T11:59:17.481749Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:59:17.481965Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-07-08T11:59:17.482233Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-07-08T11:59:17.482242Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:498} Handle TEvInterconnect::TEvNodesInfo 2025-07-08T11:59:17.482269Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-07-08T11:59:17.484404Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-07-08T11:59:17.484437Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXM01@migrate.cpp:190} Execute tx 2025-07-08T11:59:17.484450Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-07-08T11:59:17.484481Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-07-08T11:59:17.484494Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-07-08T11:59:17.484509Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-07-08T11:59:17.512056Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-07-08T11:59:17.512097Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-07-08T11:59:17.529006Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-07-08T11:59:17.529054Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-07-08T11:59:17.529073Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-07-08T11:59:17.529086Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-07-08T11:59:17.529114Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-07-08T11:59:17.529124Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-07-08T11:59:17.529131Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-07-08T11:59:17.529140Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-07-08T11:59:17.553292Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-07-08T11:59:17.553350Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-07-08T11:59:17.569196Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-07-08T11:59:17.569257Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-07-08T11:59:17.569479Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-07-08T11:59:17.569487Z node 1 
:BS_CONTROLLER DEBUG: {BSC09@impl.h:2174} LoadFinished 2025-07-08T11:59:17.571259Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-07-08T11:59:17.571277Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-07-08T11:59:17.571631Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-07-08T11:59:17.571939Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/43nv/001341/r3tmp/tmpPbx1TY/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2025-07-08T11:59:17.572014Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/43nv/001341/r3tmp/tmpPbx1TY/pdisk_1.dat 2025-07-08T11:59:17.572022Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/43nv/001341/r3tmp/tmpPbx1TY/pdisk_1.dat 2025-07-08T11:59:17.572202Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-07-08T11:59:17.572294Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-07-08T11:59:17.572317Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-07-08T11:59:17.572334Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-07-08T11:59:17.572367Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-07-08T11:59:17.572393Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-07-08T11:59:17.572789Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-07-08T11:59:17.572842Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-07-08T11:59:17.583802Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-07-08T11:59:17.584082Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-07-08T11:59:17.586331Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-07-08T11:59:17.586481Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2747} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/43nv/001341/r3tmp/tmpPbx1TY/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-07-08T11:59:17.586632Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/43nv/001341/r3tmp/tmpPbx1TY/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/43nv/001341/r3tmp/tmpPbx1TY/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 709925405448016553 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-07-08T11:59:17.586812Z node 2 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2025-07-08T11:59:17.586868Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2025-07-08T11:59:17.586875Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2025-07-08T11:59:17.586930Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2025-07-08T11:59:17.586942Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2025-07-08T11:59:17.586966Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-07-08T11:59:17.586985Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2025-07-08T11:59:17.586990Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2025-07-08T11:59:17.586997Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-07-08T11:59:17.587007Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2025-07-08T11:59:17.587127Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-07-08T11:59:17.587137Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 
2025-07-08T11:59:17.587142Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2025-07-08T11:59:17.587155Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:418:2306] 2025-07-08T11:59:17.587244Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-07-08T11:59:17.587254Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2025-07-08T11:59:17.587257Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2025-07-08T11:59:17.587266Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:420:2116] 2025-07-08T11:59:17.588093Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2025-07-08T11:59:17.588103Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:407:2302] 2025-07-08T11:59:17.588132Z node 2 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2025-07-08T11:59:17.588137Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [2:408:2112] 2025-07-08T11:59:17.588361Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:418:2306]} 2025-07-08T11:59:17.588370Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-07-08T11:59:17.588378Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2025-07-08T11:59:17.588381Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2025-07-08T11:59:17.588493Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:420:2116]} 2025-07-08T11:59:17.588538Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-07-08T11:59:17.588544Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2025-07-08T11:59:17.588547Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2025-07-08T11:59:17.607492Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-07-08T11:59:17.607513Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-07-08T11:59:17.607517Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-07-08T11:59:17.607523Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughMemory [GOOD] Test command err: 2025-07-08T11:59:17.734690Z node 2 :TX_PROXY DEBUG: actor# [2:180:2103] Bootstrap 2025-07-08T11:59:17.768888Z node 2 :TX_PROXY DEBUG: actor# [2:180:2103] Become StateWork (SchemeCache [2:187:2106]) 2025-07-08T11:59:17.769069Z node 1 :TX_PROXY DEBUG: actor# [1:179:2153] Bootstrap 2025-07-08T11:59:17.770307Z node 1 :TX_PROXY DEBUG: actor# [1:179:2153] Become StateWork (SchemeCache [1:190:2159]) 2025-07-08T11:59:17.780778Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-07-08T11:59:17.784649Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-07-08T11:59:17.784701Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-07-08T11:59:17.785014Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:59:17.785144Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-07-08T11:59:17.785808Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-07-08T11:59:17.785818Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:498} Handle TEvInterconnect::TEvNodesInfo 2025-07-08T11:59:17.785849Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-07-08T11:59:17.791902Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-07-08T11:59:17.791931Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-07-08T11:59:17.791950Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-07-08T11:59:17.791966Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-07-08T11:59:17.791982Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-07-08T11:59:17.791998Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-07-08T11:59:17.824677Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-07-08T11:59:17.824725Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-07-08T11:59:17.836868Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-07-08T11:59:17.836916Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-07-08T11:59:17.836931Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-07-08T11:59:17.836942Z node 1 
:BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-07-08T11:59:17.836982Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-07-08T11:59:17.836992Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-07-08T11:59:17.836999Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-07-08T11:59:17.837007Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-07-08T11:59:17.847874Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-07-08T11:59:17.847932Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-07-08T11:59:17.858830Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-07-08T11:59:17.858895Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-07-08T11:59:17.859086Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-07-08T11:59:17.859093Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2174} LoadFinished 2025-07-08T11:59:17.860877Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-07-08T11:59:17.860895Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-07-08T11:59:17.861284Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-07-08T11:59:17.861376Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-07-08T11:59:17.861617Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/43nv/00132a/r3tmp/tmp6pZG7Y/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2025-07-08T11:59:17.861687Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/43nv/00132a/r3tmp/tmp6pZG7Y/pdisk_1.dat 2025-07-08T11:59:17.861694Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/43nv/00132a/r3tmp/tmp6pZG7Y/pdisk_1.dat 2025-07-08T11:59:17.861901Z node 1 :BS_CONTROLLER 
DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-07-08T11:59:17.861927Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-07-08T11:59:17.861943Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-07-08T11:59:17.861985Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-07-08T11:59:17.862034Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-07-08T11:59:17.862421Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-07-08T11:59:17.862464Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-07-08T11:59:17.873394Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-07-08T11:59:17.873430Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-07-08T11:59:17.875636Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-07-08T11:59:17.875772Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2747} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/43nv/00132a/r3tmp/tmp6pZG7Y/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-07-08T11:59:17.875926Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/43nv/00132a/r3tmp/tmp6pZG7Y/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/43nv/00132a/r3tmp/tmp6pZG7Y/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 11290546738762797164 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-07-08T11:59:17.876084Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2025-07-08T11:59:17.876150Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2025-07-08T11:59:17.876156Z node 2 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2025-07-08T11:59:17.876235Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2025-07-08T11:59:17.876260Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-07-08T11:59:17.876277Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2025-07-08T11:59:17.876282Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2025-07-08T11:59:17.876288Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-07-08T11:59:17.876293Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2025-07-08T11:59:17.876402Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-07-08T11:59:17.876410Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2025-07-08T11:59:17.876415Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2025-07-08T11:59:17.876429Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:418:2307] 2025-07-08T11:59:17.876440Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2025-07-08T11:59:17.877291Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-07-08T11:59:17.877303Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2025-07-08T11:59:17.877307Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2025-07-08T11:59:17.877316Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:421:2116] 2025-07-08T11:59:17.877329Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2025-07-08T11:59:17.877335Z node 1 :TENANT_POOL DEBUG: 
TDomainTenantPool(dc-1) send status update to [1:408:2303] 2025-07-08T11:59:17.877363Z node 2 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2025-07-08T11:59:17.877366Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [2:409:2112] 2025-07-08T11:59:17.877529Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:418:2307]} 2025-07-08T11:59:17.877579Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-07-08T11:59:17.877587Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2025-07-08T11:59:17.877590Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2025-07-08T11:59:17.877699Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:421:2116]} 2025-07-08T11:59:17.877759Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-07-08T11:59:17.877764Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2025-07-08T11:59:17.877767Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2025-07-08T11:59:17.896201Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-07-08T11:59:17.896225Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-07-08T11:59:17.896229Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-07-08T11:59:17.896235Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. >> TCacheTest::PathBelongsToDomain [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/py3test >> test_watermarks.py::TestWatermarks::test_idle_watermarks[v1-mvp_external_ydb_endpoint0] [GOOD] Test command err: contrib/tools/python3/Lib/multiprocessing/popen_fork.py:66: DeprecationWarning: This process (pid=296819) is multi-threaded, use of fork() may lead to deadlocks in the child. 
contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback contrib/tools/python3/Lib/multiprocessing/process.py:125: ResourceWarning: unclosed ResourceWarning: Enable tracemalloc to get the object allocation traceback ::1 - - [08/Jul/2025 11:59:04] send response localhost:24895/?database=local ::1 - - [08/Jul/2025 11:59:04] "GET /database?databaseId=FakeDatabaseId HTTP/1.1" 200 - sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/43nv/0018d1/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk42/testing_out_stuff/test_stop.py.TestStop.test_stop_query.v1-streaming/default/node_1/stdout'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedWriter name='/home/runner/.ya/build/build_root/43nv/0018d1/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk42/testing_out_stuff/test_stop.py.TestStop.test_stop_query.v1-streaming/default/node_1/stderr'> ResourceWarning: Enable tracemalloc to get the object allocation traceback sys:1: ResourceWarning: unclosed file <_io.BufferedRandom name='/home/runner/.ya/build/build_root/43nv/0018d1/ydb/tests/fq/yds/test-results/py3test/testing_out_stuff/chunk42/testing_out_stuff/test_stop.py.TestStop.test_stop_query.v1-streaming/default/node_1/logfile_6ok6952z.log'> ResourceWarning: Enable tracemalloc to get the object allocation traceback ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughExecutionUnits [GOOD] Test command err: 2025-07-08T11:59:17.890822Z node 2 :TX_PROXY DEBUG: actor# [2:180:2103] Bootstrap 2025-07-08T11:59:17.914327Z node 2 :TX_PROXY DEBUG: actor# [2:180:2103] Become StateWork (SchemeCache [2:187:2106]) 2025-07-08T11:59:17.914485Z node 1 :TX_PROXY DEBUG: actor# [1:179:2153] Bootstrap 2025-07-08T11:59:17.915295Z node 1 :TX_PROXY DEBUG: actor# [1:179:2153] Become StateWork (SchemeCache [1:190:2159]) 2025-07-08T11:59:17.927006Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-07-08T11:59:17.928538Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-07-08T11:59:17.928577Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-07-08T11:59:17.928849Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:59:17.928998Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-07-08T11:59:17.929456Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-07-08T11:59:17.929466Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:498} Handle TEvInterconnect::TEvNodesInfo 2025-07-08T11:59:17.929493Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-07-08T11:59:17.931247Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-07-08T11:59:17.931264Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-07-08T11:59:17.931276Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-07-08T11:59:17.931288Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-07-08T11:59:17.931299Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-07-08T11:59:17.931324Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-07-08T11:59:17.963920Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-07-08T11:59:17.963968Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-07-08T11:59:17.975779Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-07-08T11:59:17.975825Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-07-08T11:59:17.975840Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-07-08T11:59:17.975853Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-07-08T11:59:17.975876Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-07-08T11:59:17.975885Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-07-08T11:59:17.975892Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-07-08T11:59:17.975903Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-07-08T11:59:17.986788Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-07-08T11:59:17.986833Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-07-08T11:59:17.997712Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-07-08T11:59:17.997777Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-07-08T11:59:17.997959Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-07-08T11:59:17.997967Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2174} LoadFinished 2025-07-08T11:59:17.999741Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-07-08T11:59:17.999759Z node 
1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-07-08T11:59:18.000088Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-07-08T11:59:18.000168Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-07-08T11:59:18.000396Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/43nv/00130b/r3tmp/tmp81XJfK/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2025-07-08T11:59:18.000457Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/43nv/00130b/r3tmp/tmp81XJfK/pdisk_1.dat 2025-07-08T11:59:18.000465Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/43nv/00130b/r3tmp/tmp81XJfK/pdisk_1.dat 2025-07-08T11:59:18.000673Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-07-08T11:59:18.000698Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-07-08T11:59:18.000712Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-07-08T11:59:18.000750Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-07-08T11:59:18.000793Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-07-08T11:59:18.001242Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-07-08T11:59:18.001291Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-07-08T11:59:18.013871Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-07-08T11:59:18.013925Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-07-08T11:59:18.016466Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError 
from IoContext->Setup PDiskId# 1000 2025-07-08T11:59:18.016631Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2747} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/43nv/00130b/r3tmp/tmp81XJfK/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-07-08T11:59:18.016813Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/43nv/00130b/r3tmp/tmp81XJfK/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/43nv/00130b/r3tmp/tmp81XJfK/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 7131635435121840397 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-07-08T11:59:18.017117Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2025-07-08T11:59:18.017200Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2025-07-08T11:59:18.017205Z node 2 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2025-07-08T11:59:18.017249Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2025-07-08T11:59:18.017271Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-07-08T11:59:18.017289Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2025-07-08T11:59:18.017293Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2025-07-08T11:59:18.017299Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-07-08T11:59:18.017303Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2025-07-08T11:59:18.017392Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-07-08T11:59:18.017401Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2025-07-08T11:59:18.017405Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2025-07-08T11:59:18.017419Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:418:2307] 2025-07-08T11:59:18.017430Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2025-07-08T11:59:18.018238Z node 2 :LOCAL 
DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-07-08T11:59:18.018257Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2025-07-08T11:59:18.018261Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2025-07-08T11:59:18.018275Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:421:2116] 2025-07-08T11:59:18.018296Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2025-07-08T11:59:18.018304Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:408:2303] 2025-07-08T11:59:18.018345Z node 2 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2025-07-08T11:59:18.018350Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [2:409:2112] 2025-07-08T11:59:18.018540Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:418:2307]} 2025-07-08T11:59:18.018592Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-07-08T11:59:18.018600Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2025-07-08T11:59:18.018603Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2025-07-08T11:59:18.018716Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:421:2116]} 2025-07-08T11:59:18.018762Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-07-08T11:59:18.018767Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2025-07-08T11:59:18.018771Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2025-07-08T11:59:18.040164Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-07-08T11:59:18.040187Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-07-08T11:59:18.040192Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-07-08T11:59:18.040199Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
>> TCacheTest::RacyCreateAndSync [GOOD] >> TCacheTest::CheckSystemViewAccess >> KqpRm::NodesMembershipByExchanger [GOOD] >> TCacheTest::CheckSystemViewAccess [GOOD] >> TCacheTest::CookiesArePreserved |65.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part10/pytest >> test.py::test[sampling-yql-14664_deps-default.txt-Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::PathBelongsToDomain [GOOD] Test command err: 2025-07-08T11:59:18.234039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:18.234063Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-07-08T11:59:18.279640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-07-08T11:59:18.283155Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2025-07-08T11:59:18.440087Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:18.440113Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-07-08T11:59:18.461918Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-07-08T11:59:18.463021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-07-08T11:59:18.463849Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 
at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 2025-07-08T11:59:18.465117Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Path does not belong to the specified domain: self# [2:227:2205], domain# [OwnerId: 72057594046678944, LocalPathId: 1], path's domain# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-07-08T11:59:18.465150Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Path does not belong to the specified domain: self# [2:229:2207], domain# [OwnerId: 72057594046678944, LocalPathId: 1], path's domain# [OwnerId: 72057594046678944, LocalPathId: 2] >> Normalizers::SchemaVersionsNormalizer [GOOD] |65.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |65.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::RacyCreateAndSync [GOOD] Test command err: 2025-07-08T11:59:18.356354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:18.356377Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-07-08T11:59:18.410949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-07-08T11:59:18.413635Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-07-08T11:59:18.413992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-07-08T11:59:18.419031Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-07-08T11:59:18.431051Z node 
1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2025-07-08T11:59:18.696054Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:18.696082Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-07-08T11:59:18.712991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-07-08T11:59:18.715077Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 >> TCacheTest::CookiesArePreserved [GOOD] |65.0%| [TA] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} |65.0%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NodesMembershipByExchanger [GOOD] Test command err: 2025-07-08T11:59:17.289588Z node 2 :TX_PROXY DEBUG: actor# [2:180:2103] Bootstrap 2025-07-08T11:59:17.344123Z node 2 :TX_PROXY DEBUG: actor# [2:180:2103] Become StateWork (SchemeCache [2:187:2106]) 2025-07-08T11:59:17.344266Z node 1 :TX_PROXY DEBUG: actor# [1:179:2153] Bootstrap 2025-07-08T11:59:17.345733Z node 1 :TX_PROXY DEBUG: actor# [1:179:2153] Become StateWork (SchemeCache [1:190:2159]) 2025-07-08T11:59:17.374702Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-07-08T11:59:17.376456Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-07-08T11:59:17.376503Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-07-08T11:59:17.376909Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:59:17.377090Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-07-08T11:59:17.377337Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-07-08T11:59:17.377346Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:498} Handle TEvInterconnect::TEvNodesInfo 2025-07-08T11:59:17.377377Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-07-08T11:59:17.385184Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-07-08T11:59:17.385237Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXM01@migrate.cpp:190} Execute tx 2025-07-08T11:59:17.385250Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-07-08T11:59:17.385296Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-07-08T11:59:17.385308Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-07-08T11:59:17.385327Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-07-08T11:59:17.413416Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-07-08T11:59:17.413448Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-07-08T11:59:17.429202Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-07-08T11:59:17.429247Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-07-08T11:59:17.429263Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-07-08T11:59:17.429274Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-07-08T11:59:17.429296Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-07-08T11:59:17.429304Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-07-08T11:59:17.429309Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-07-08T11:59:17.429316Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-07-08T11:59:17.441557Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-07-08T11:59:17.441595Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-07-08T11:59:17.454145Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-07-08T11:59:17.454212Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-07-08T11:59:17.454434Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-07-08T11:59:17.454442Z node 1 
:BS_CONTROLLER DEBUG: {BSC09@impl.h:2174} LoadFinished 2025-07-08T11:59:17.456207Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-07-08T11:59:17.456225Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-07-08T11:59:17.456582Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-07-08T11:59:17.456895Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/43nv/001342/r3tmp/tmp2wGq2J/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2025-07-08T11:59:17.457084Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/43nv/001342/r3tmp/tmp2wGq2J/pdisk_1.dat 2025-07-08T11:59:17.457097Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/43nv/001342/r3tmp/tmp2wGq2J/pdisk_1.dat 2025-07-08T11:59:17.457299Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-07-08T11:59:17.457382Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-07-08T11:59:17.457406Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-07-08T11:59:17.457422Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-07-08T11:59:17.457453Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-07-08T11:59:17.457480Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-07-08T11:59:17.457918Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-07-08T11:59:17.457993Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-07-08T11:59:17.471030Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-07-08T11:59:17.471334Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-07-08T11:59:17.473613Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-07-08T11:59:17.473765Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2747} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/43nv/001342/r3tmp/tmp2wGq2J/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-07-08T11:59:17.473900Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/43nv/001342/r3tmp/tmp2wGq2J/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/43nv/001342/r3tmp/tmp2wGq2J/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 1884888918242861126 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-07-08T11:59:17.474072Z node 2 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2025-07-08T11:59:17.474128Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2025-07-08T11:59:17.474135Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2025-07-08T11:59:17.474182Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2025-07-08T11:59:17.474193Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2025-07-08T11:59:17.474216Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-07-08T11:59:17.474233Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2025-07-08T11:59:17.474238Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2025-07-08T11:59:17.474243Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-07-08T11:59:17.474252Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2025-07-08T11:59:17.474360Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-07-08T11:59:17.474368Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 
2025-07-08T11:59:17.474372Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2025-07-08T11:59:17.474386Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:418:2306] 2025-07-08T11:59:17.474461Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-07-08T11:59:17.474466Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2025-07-08T11:59:17.474470Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2025-07-08T11:59:17.474476Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:420:2116] 2025-07-08T11:59:17.475227Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2025-07-08T11:59:17.475233Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [1:407:2302] 2025-07-08T11:59:17.475260Z node 2 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2025-07-08T11:59:17.475263Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [2:408:2112] 2025-07-08T11:59:17.475440Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:418:2306]} 2025-07-08T11:59:17.475450Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-07-08T11:59:17.475457Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2025-07-08T11:59:17.475460Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2025-07-08T11:59:17.475557Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:420:2116]} 2025-07-08T11:59:17.475596Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-07-08T11:59:17.475601Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2025-07-08T11:59:17.475603Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2025-07-08T11:59:17.506024Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-07-08T11:59:17.506049Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-07-08T11:59:17.506054Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-07-08T11:59:17.506060Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2025-07-08T11:59:17.519075Z node 1 :BS_CONTROLLER DEBUG: {BSC19@console_interaction.cpp:74} Console proposed config response Response# {Status: ReverseCommit ConsoleConfigVersion: 0 YAML: "" } 2025-07-08T11:59:17.555986Z node 1 :TX_PROXY DEBUG: actor# [1:179:2153] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-07-08T11:59:17.557352Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2025-07-08T11:59:17.557415Z node 2 :TX_PROXY DEBUG: actor# [2:180:2103] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-07-08T11:59:17.558311Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-07-08T11:59:17.591789Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1 AvailableSize: 34225520640 TotalSize: 34359738368 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 EnforcedDynamicSlotSize: 34158411776 State: Normal } } 2025-07-08T11:59:17.645870Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1000 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: OpenFileError } } 2025-07-08T11:59:17.687495Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank: 0 AvailableSize: 34158411776 AllocatedSize: 0 StatusFlags: 1 VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Occupancy: 0.00098231827111984276 State: OK Replicated: true DiskSpace: Green } } 2025-07-08T11:59:18.014365Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-07-08T11:59:18.014860Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-07-08T11:59:18.014916Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } >> TCacheTest::MigrationLostMessage ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::SchemaVersionsNormalizer [GOOD] Test command err: 2025-07-08T11:59:18.095812Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:124:2156];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:59:18.099831Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:124:2156];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:59:18.099890Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:59:18.100544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SchemaVersionCleaner; 2025-07-08T11:59:18.100587Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-07-08T11:59:18.100626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:59:18.100654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:59:18.100670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:59:18.100686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:59:18.100701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:59:18.100719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:59:18.100735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:59:18.100750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:59:18.100766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:59:18.100785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:59:18.100803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:124:2156];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:59:18.111610Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:59:18.111696Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=SchemaVersionCleaner; 2025-07-08T11:59:18.111706Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-07-08T11:59:18.111775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SchemaVersionCleaner;id=NO_VALUE_OPTIONAL; 
2025-07-08T11:59:18.111788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-07-08T11:59:18.111795Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-07-08T11:59:18.111820Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:18.111832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:59:18.111841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:59:18.111845Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-07-08T11:59:18.111854Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:59:18.111861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:59:18.111867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:59:18.111872Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-07-08T11:59:18.111887Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:18.111893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:59:18.111908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:59:18.111911Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-07-08T11:59:18.111920Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:59:18.111927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:59:18.111934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:59:18.111938Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:59:18.111946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:59:18.111952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:59:18.111958Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:59:18.111965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:59:18.111972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:59:18.111976Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:59:18.111994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:59:18.112001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:59:18.112005Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:59:18.112017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:59:18.112024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:59:18.112029Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:59:18.112035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:59:18.112042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:59:18.112045Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:59:18.112052Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:59:18.112059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:59:18.112064Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:59:18.112076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:59:18.112083Z node 1 :TX_COLUMNSHAR ... a.cpp:49;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-07-08T11:59:19.105158Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:508:2512];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:207;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-07-08T11:59:19.105464Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:508:2512];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:88;event=TEvTaskProcessedResult; 2025-07-08T11:59:19.105473Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:508:2512];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=merge.cpp:75;event=DoApply;interval_idx=0; 2025-07-08T11:59:19.105479Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:508:2512];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2; 2025-07-08T11:59:19.105492Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:508:2512];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=20048;merger=0;interval_id=2; 2025-07-08T11:59:19.105501Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:508:2512];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-07-08T11:59:19.105514Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:508:2512];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:19.105520Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:508:2512];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=1;count=20048;finished=1; 2025-07-08T11:59:19.105526Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:508:2512];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:207;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-07-08T11:59:19.105570Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:508:2512];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T11:59:19.105590Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:508:2512];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:1;records_count:20048;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:19.105596Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:508:2512];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-07-08T11:59:19.105606Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:508:2512];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:238;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;);columns=3;rows=20048; 2025-07-08T11:59:19.105617Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:508:2512];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:258;stage=data_format;batch_size=2405760;num_rows=20048;batch_columns=key1,key2,field; 2025-07-08T11:59:19.105695Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:508:2512];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:503:2508];bytes=2405760;rows=20048;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2025-07-08T11:59:19.105712Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:508:2512];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:278;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:19.105725Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:508:2512];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:19.105732Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:508:2512];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:19.106134Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:508:2512];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T11:59:19.106150Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:508:2512];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:19.106155Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:508:2512];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:19.106160Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:508:2512] finished for tablet 9437184 2025-07-08T11:59:19.106214Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:508:2512];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:425;event=scan_finish;compute_actor_id=[1:503:2508];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.058},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.059}],"full":{"a":1751975959047139,"name":"_full_task","f":1751975959047139,"d_finished":0,"c":0,"l":1751975959106168,"d":59029},"events":[{"name":"bootstrap","f":1751975959047194,"d_finished":533,"c":1,"l":1751975959047727,"d":533},{"a":1751975959106129,"name":"ack","f":1751975959105564,"d_finished":171,"c":1,"l":1751975959105735,"d":210},{"a":1751975959106125,"name":"processing","f":1751975959048858,"d_finished":40972,"c":16,"l":1751975959105735,"d":41015},{"name":"ProduceResults","f":1751975959047522,"d_finished":518,"c":19,"l":1751975959106157,"d":518},{"a":1751975959106158,"name":"Finish","f":1751975959106158,"d_finished":0,"c":0,"l":1751975959106168,"d":10},{"name":"task_result","f":1751975959048863,"d_finished":40739,"c":15,"l":1751975959105535,"d":40739}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:19.106225Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:508:2512];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:503:2508];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-07-08T11:59:19.106247Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:508:2512];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:379;event=scan_finished;compute_actor_id=[1:503:2508];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.058},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.059}],"full":{"a":1751975959047139,"name":"_full_task","f":1751975959047139,"d_finished":0,"c":0,"l":1751975959106229,"d":59090},"events":[{"name":"bootstrap","f":1751975959047194,"d_finished":533,"c":1,"l":1751975959047727,"d":533},{"a":1751975959106129,"name":"ack","f":1751975959105564,"d_finished":171,"c":1,"l":1751975959105735,"d":271},{"a":1751975959106125,"name":"processing","f":1751975959048858,"d_finished":40972,"c":16,"l":1751975959105735,"d":41076},{"name":"ProduceResults","f":1751975959047522,"d_finished":518,"c":19,"l":1751975959106157,"d":518},{"a":1751975959106158,"name":"Finish","f":1751975959106158,"d_finished":0,"c":0,"l":1751975959106229,"d":71},{"name":"task_result","f":1751975959048863,"d_finished":40739,"c":15,"l":1751975959105535,"d":40739}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-07-08T11:59:19.106257Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:508:2512];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-07-08T11:59:19.047026Z;index_granules=0;index_portions=2;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=2776560;inserted_portions_bytes=0;committed_portions_bytes=2488696;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5265256;selected_rows=0; 2025-07-08T11:59:19.106260Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:508:2512];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:192;event=scan_aborted;reason=unexpected on destructor; 2025-07-08T11:59:19.106294Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:508:2512];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; >> TCacheTest::Recreate >> TCacheTest::Attributes >> TCacheTest::SystemView >> TCacheTest::List >> TCacheTest::MigrationCommon >> KqpRm::SingleSnapshotByExchanger [GOOD] >> TCacheTest::Recreate [GOOD] >> TCacheTest::SysLocks >> TCacheTest::Attributes [GOOD] >> TCacheTest::CheckAccess ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::CookiesArePreserved [GOOD] Test command err: 2025-07-08T11:59:19.079486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:19.079510Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-07-08T11:59:19.130255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at 
schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-07-08T11:59:19.132214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-07-08T11:59:19.132785Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-07-08T11:59:19.132886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 2025-07-08T11:59:19.133340Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:206:2196], for# user1@builtin, access# DescribeSchema 2025-07-08T11:59:19.133390Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:212:2202], for# user1@builtin, access# 2025-07-08T11:59:19.280672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:19.280696Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-07-08T11:59:19.302515Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-07-08T11:59:19.303544Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-07-08T11:59:19.304285Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 >> TCacheTest::SystemView 
[GOOD] >> TCacheTest::TableSchemaVersion >> TCacheTest::CheckAccess [GOOD] >> TCacheTest::SysLocks [GOOD] >> TCacheTest::List [GOOD] >> TCacheTest::MigrationCommit >> TCacheTest::MigrationLostMessage [GOOD] >> TCacheTest::MigrationUndo >> TCacheTest::TableSchemaVersion [GOOD] >> TCacheTest::MigrationCommon [GOOD] >> TCacheTest::MigrationDeletedPathNavigate ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SingleSnapshotByExchanger [GOOD] Test command err: 2025-07-08T11:59:18.078544Z node 2 :TX_PROXY DEBUG: actor# [2:180:2103] Bootstrap 2025-07-08T11:59:18.104562Z node 2 :TX_PROXY DEBUG: actor# [2:180:2103] Become StateWork (SchemeCache [2:187:2106]) 2025-07-08T11:59:18.104711Z node 1 :TX_PROXY DEBUG: actor# [1:179:2153] Bootstrap 2025-07-08T11:59:18.106311Z node 1 :TX_PROXY DEBUG: actor# [1:179:2153] Become StateWork (SchemeCache [1:190:2159]) 2025-07-08T11:59:18.125326Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-07-08T11:59:18.126736Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-07-08T11:59:18.126781Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-07-08T11:59:18.127061Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:59:18.127182Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-07-08T11:59:18.127685Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-07-08T11:59:18.127696Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:498} Handle TEvInterconnect::TEvNodesInfo 2025-07-08T11:59:18.127724Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-07-08T11:59:18.129572Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-07-08T11:59:18.129591Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-07-08T11:59:18.129604Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-07-08T11:59:18.129616Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-07-08T11:59:18.129641Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-07-08T11:59:18.129657Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-07-08T11:59:18.166669Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-07-08T11:59:18.166724Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-07-08T11:59:18.178129Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-07-08T11:59:18.178178Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-07-08T11:59:18.178195Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-07-08T11:59:18.178206Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-07-08T11:59:18.178230Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-07-08T11:59:18.178238Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-07-08T11:59:18.178244Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-07-08T11:59:18.178252Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-07-08T11:59:18.189930Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-07-08T11:59:18.189983Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-07-08T11:59:18.200809Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-07-08T11:59:18.200871Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-07-08T11:59:18.201102Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-07-08T11:59:18.201111Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2174} LoadFinished 2025-07-08T11:59:18.202700Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-07-08T11:59:18.202716Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-07-08T11:59:18.203048Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-07-08T11:59:18.203131Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-07-08T11:59:18.203366Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/43nv/00129b/r3tmp/tmpQYcd9n/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2025-07-08T11:59:18.203439Z node 1 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/43nv/00129b/r3tmp/tmpQYcd9n/pdisk_1.dat 2025-07-08T11:59:18.203446Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/43nv/00129b/r3tmp/tmpQYcd9n/pdisk_1.dat 2025-07-08T11:59:18.203643Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-07-08T11:59:18.203668Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-07-08T11:59:18.203683Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-07-08T11:59:18.203719Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-07-08T11:59:18.203763Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-07-08T11:59:18.204094Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-07-08T11:59:18.204153Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-07-08T11:59:18.215004Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-07-08T11:59:18.215046Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-07-08T11:59:18.217428Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-07-08T11:59:18.217573Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2747} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/43nv/00129b/r3tmp/tmpQYcd9n/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-07-08T11:59:18.217717Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/43nv/00129b/r3tmp/tmpQYcd9n/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/43nv/00129b/r3tmp/tmpQYcd9n/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 1988897906060783358 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-07-08T11:59:18.217868Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2025-07-08T11:59:18.217928Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2025-07-08T11:59:18.217934Z node 2 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2025-07-08T11:59:18.217969Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2025-07-08T11:59:18.217992Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-07-08T11:59:18.218027Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2025-07-08T11:59:18.218032Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2025-07-08T11:59:18.218039Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-07-08T11:59:18.218044Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2025-07-08T11:59:18.218148Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-07-08T11:59:18.218157Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2025-07-08T11:59:18.218161Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2025-07-08T11:59:18.218175Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:418:2307] 2025-07-08T11:59:18.218185Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2025-07-08T11:59:18.218986Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-07-08T11:59:18.218997Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2025-07-08T11:59:18.219000Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2025-07-08T11:59:18.219009Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:421:2116] 2025-07-08T11:59:18.219019Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2025-07-08T11:59:18.219024Z node 1 :TENANT_POOL DEBUG: 
TDomainTenantPool(dc-1) send status update to [1:408:2303] 2025-07-08T11:59:18.219053Z node 2 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2025-07-08T11:59:18.219057Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [2:409:2112] 2025-07-08T11:59:18.219211Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:418:2307]} 2025-07-08T11:59:18.219255Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-07-08T11:59:18.219262Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2025-07-08T11:59:18.219265Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2025-07-08T11:59:18.219402Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:421:2116]} 2025-07-08T11:59:18.219448Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-07-08T11:59:18.219454Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2025-07-08T11:59:18.219457Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2025-07-08T11:59:18.258373Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-07-08T11:59:18.258397Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-07-08T11:59:18.258401Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-07-08T11:59:18.258407Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-07-08T11:59:18.277206Z node 1 :BS_CONTROLLER DEBUG: {BSC19@console_interaction.cpp:74} Console proposed config response Response# {Status: ReverseCommit ConsoleConfigVersion: 0 YAML: "" } 2025-07-08T11:59:18.309851Z node 1 :TX_PROXY DEBUG: actor# [1:179:2153] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-07-08T11:59:18.311197Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2025-07-08T11:59:18.311229Z node 2 :TX_PROXY DEBUG: actor# [2:180:2103] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-07-08T11:59:18.311761Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-07-08T11:59:18.350198Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1 AvailableSize: 34225520640 TotalSize: 34359738368 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 EnforcedDynamicSlotSize: 34158411776 State: Normal } } 2025-07-08T11:59:18.383961Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1000 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: OpenFileError } } 2025-07-08T11:59:18.428471Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank: 0 AvailableSize: 34158411776 AllocatedSize: 0 StatusFlags: 1 VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Occupancy: 0.00098231827111984276 State: OK Replicated: true DiskSpace: Green } } 2025-07-08T11:59:18.758473Z node 1 :BS_CONTROLLER DEBUG: 
{BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-07-08T11:59:18.759651Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-07-08T11:59:18.759712Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } >> BasicUsage::SessionNotDestroyedWhileCompressionInFlight [GOOD] >> BasicUsage::SessionNotDestroyedWhileUserEventHandlingInFlight >> test.py::test[aggregate-group_compact_sorted_with_diff_order--Results] [GOOD] >> test.py::test[aggregate-list_after_group-default.txt-Results] >> TCacheTest::MigrationCommit [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::CheckAccess [GOOD] Test command err: 2025-07-08T11:59:19.855537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:19.855559Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-07-08T11:59:19.903500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-07-08T11:59:19.906374Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2025-07-08T11:59:20.061172Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:20.061195Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-07-08T11:59:20.076618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-07-08T11:59:20.078189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 102:0, at 
schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 2025-07-08T11:59:20.078727Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:198:2188], for# user1@builtin, access# DescribeSchema 2025-07-08T11:59:20.078779Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:202:2192], for# user1@builtin, access# DescribeSchema ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::SysLocks [GOOD] Test command err: 2025-07-08T11:59:19.816779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:19.816804Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-07-08T11:59:19.870517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-07-08T11:59:19.873434Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-07-08T11:59:19.873679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-07-08T11:59:19.878688Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-07-08T11:59:19.893785Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2025-07-08T11:59:20.024310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:20.024323Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-07-08T11:59:20.039902Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::TableSchemaVersion [GOOD] Test command err: 2025-07-08T11:59:19.931486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:19.931515Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-07-08T11:59:19.978421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 2025-07-08T11:59:20.147273Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:20.147299Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-07-08T11:59:20.162640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-07-08T11:59:20.166874Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 101:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-07-08T11:59:20.230400Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 >> TCacheTest::MigrationUndo [GOOD] ------- [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationCommit [GOOD] Test command err: 2025-07-08T11:59:20.002820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:20.002838Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-07-08T11:59:20.046348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 TestWaitNotification wait txId: 103 2025-07-08T11:59:20.050291Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-07-08T11:59:20.050343Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-07-08T11:59:20.050359Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-07-08T11:59:20.226096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:20.226120Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-07-08T11:59:20.242944Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 Leader for TabletID 72057594046678944 is [2:71:2110] sender: [2:176:2067] recipient: [2:47:2094] Leader for TabletID 72057594046678944 is [2:71:2110] sender: [2:179:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:71:2110] sender: [2:180:2067] recipient: [2:178:2172] Leader for 
TabletID 72057594046678944 is [2:181:2173] sender: [2:182:2067] recipient: [2:178:2172] 2025-07-08T11:59:20.249106Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:20.249128Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [2:181:2173] sender: [2:212:2067] recipient: [2:24:2071] 2025-07-08T11:59:20.271086Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-07-08T11:59:20.272636Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 102:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:248:2067] recipient: [2:239:2214] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:248:2067] recipient: [2:239:2214] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:249:2067] recipient: [2:244:2218] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:249:2067] recipient: [2:244:2218] Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:252:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:252:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:253:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:253:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409546 is [2:251:2220] sender: [2:254:2067] recipient: [2:239:2214] Leader for TabletID 72075186233409547 is [2:256:2222] sender: [2:257:2067] recipient: [2:244:2218] TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 2025-07-08T11:59:20.288090Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 Leader for TabletID 72075186233409546 is [2:251:2220] sender: [2:290:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [2:256:2222] sender: [2:291:2067] recipient: [2:24:2071] FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 2025-07-08T11:59:20.326428Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 TestWaitNotification wait txId: 104 Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:341:2067] recipient: [2:337:2286] IGNORE Leader for TabletID 
72075186233409548 is [0:0:0] sender: [2:341:2067] recipient: [2:337:2286] Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:342:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:342:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409548 is [2:344:2290] sender: [2:345:2067] recipient: [2:337:2286] Leader for TabletID 72075186233409548 is [2:344:2290] sender: [2:346:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-07-08T11:59:20.410593Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomain, opId: 105:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:421:2067] recipient: [2:417:2335] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:421:2067] recipient: [2:417:2335] Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:423:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:423:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409549 is [2:424:2338] sender: [2:425:2067] recipient: [2:417:2335] 2025-07-08T11:59:20.420689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:20.420708Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 Leader for TabletID 72075186233409549 is [2:424:2338] sender: [2:452:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 105 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } TestModificationResults wait txId: 106 2025-07-08T11:59:20.455052Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T11:59:20.455063Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 4] 2025-07-08T11:59:20.455119Z node 2 :FLAT_TX_SCHEMESHARD ERROR: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2025-07-08T11:59:20.455134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-07-08T11:59:20.466453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2025-07-08T11:59:20.466538Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { 
DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } Leader for TabletID 72057594046678944 is [2:181:2173] sender: [2:512:2067] recipient: [2:47:2094] Leader for TabletID 72057594046678944 is [2:181:2173] sender: [2:516:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:181:2173] sender: [2:517:2067] recipient: [2:515:2409] Leader for TabletID 72057594046678944 is [2:518:2410] sender: [2:519:2067] recipient: [2:515:2409] 2025-07-08T11:59:20.473098Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:20.473119Z node 2 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046678944 is [2:518:2410] sender: [2:546:2067] recipient: [2:24:2071] { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } >> KqpRm::SnapshotSharingByExchanger |65.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationUndo [GOOD] Test command err: 2025-07-08T11:59:19.750144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:19.750172Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-07-08T11:59:19.796476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 Leader for TabletID 72057594046678944 is [1:71:2110] sender: [1:176:2067] recipient: [1:47:2094] Leader for TabletID 72057594046678944 is [1:71:2110] sender: [1:179:2067] recipient: [1:24:2071] Leader for TabletID 72057594046678944 is [1:71:2110] sender: [1:180:2067] recipient: [1:178:2172] Leader for TabletID 72057594046678944 is [1:181:2173] sender: [1:182:2067] recipient: [1:178:2172] 2025-07-08T11:59:19.803721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:19.803745Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [1:181:2173] sender: [1:212:2067] recipient: [1:24:2071] 2025-07-08T11:59:19.825853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 
101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-07-08T11:59:19.827242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 102:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:248:2067] recipient: [1:240:2215] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:248:2067] recipient: [1:240:2215] Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:249:2067] recipient: [1:241:2216] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:249:2067] recipient: [1:241:2216] Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:251:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:251:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:254:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:254:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409546 is [1:252:2220] sender: [1:256:2067] recipient: [1:240:2215] Leader for TabletID 72075186233409547 is [1:255:2222] sender: [1:257:2067] recipient: [1:241:2216] TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 2025-07-08T11:59:19.850270Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 Leader for TabletID 72075186233409546 is [1:252:2220] sender: [1:290:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409547 is [1:255:2222] sender: [1:291:2067] recipient: [1:24:2071] FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 2025-07-08T11:59:19.889670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 TestWaitNotification wait txId: 104 Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:342:2067] recipient: [1:337:2286] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:342:2067] recipient: [1:337:2286] Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:345:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:345:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409548 is [1:344:2290] sender: [1:346:2067] recipient: [1:337:2286] TestWaitNotification: OK eventTxId 103 Leader for TabletID 72075186233409548 is [1:344:2290] sender: [1:363:2067] recipient: [1:24:2071] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-07-08T11:59:20.004766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomain, opId: 105:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:422:2067] recipient: [1:418:2335] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:422:2067] recipient: [1:418:2335] Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:423:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:423:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409549 is [1:425:2338] sender: [1:426:2067] recipient: [1:418:2335] 2025-07-08T11:59:20.022612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:20.022634Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 Leader for TabletID 72075186233409549 is [1:425:2338] sender: [1:453:2067] recipient: [1:24:2071] TestWaitNotification: OK eventTxId 105 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } TestModificationResults wait txId: 106 2025-07-08T11:59:20.046446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T11:59:20.046462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 4] 2025-07-08T11:59:20.046542Z node 1 :FLAT_TX_SCHEMESHARD ERROR: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2025-07-08T11:59:20.046561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-07-08T11:59:20.058041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2025-07-08T11:59:20.058152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } TestModificationResults wait txId: 107 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 skipDeleteNotification path: /Root/USER_0/DirA/Table1 pathId: [OwnerId: 72057594046678944, LocalPathId: 4] Strong: 1 TestWaitNotification: OK eventTxId 107 
TestModificationResults wait txId: 108 2025-07-08T11:59:20.093071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 108:0, at schemeshard: 72075186233409549 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 skipDeleteNotification path: /Root/USER_0/DirA pathId: [OwnerId: 72057594046678944, LocalPathId: 3] Strong: 1 TestWaitNotification: OK eventTxId 108 TestModificationResults wait txId: 109 skipDeleteNotification path: /Root/USER_0/DirA pathId: [OwnerId: 72057594046678944, LocalPathId: 3] Strong: 1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 TestWaitNotification: OK eventTxId 109 TestModificationResults wait txId: 110 2025-07-08T11:59:20.172383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 110:0, at schemeshard: 72075186233409549 Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:628:2067] recipient: [1:624:2505] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:628:2067] recipient: [1:624:2505] Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:630:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:630:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409550 is [1:631:2508] sender: [1:632:2067] recipient: [1:624:2505] TestModificationResult got TxId: 110, wait until txId: 110 TestWaitNotification wait txId: 110 Leader for TabletID 72075186233409550 is [1:631:2508] sender: [1:651:2067] recipient: [1:24:2071] TestWaitNotification: OK eventTxId 110 2025-07-08T11:59:20.362537Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:20.362558Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-07-08T11:59:20.378693Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 2025-07-08T11:59:20.379650Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 65543, Sender [2:175:2171], Recipient [2:71:2110]: NActors::TEvents::TEvPoison 2025-07-08T11:59:20.379794Z node 2 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [2:71:2110] sender: [2:176:2067] recipient: [2:47:2094] Leader for TabletID 72057594046678944 is [2:71:2110] sender: [2:179:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:71:2110] sender: [2:180:2067] recipient: [2:178:2172] Leader for TabletID 72057594046678944 is [2:181:2173] sender: [2:182:2067] recipient: [2:178:2172] 2025-07-08T11:59:20.380899Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received ev ... 
78944 2025-07-08T11:59:20.583793Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-07-08T11:59:20.583800Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:3, tabletId: 72075186233409548, PathId: [OwnerId: 72057594046678944, LocalPathId: 4], TabletType: DataShard, at schemeshard: 72057594046678944 2025-07-08T11:59:20.583803Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-07-08T11:59:20.583815Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:20.583841Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.583858Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2025-07-08T11:59:20.583889Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.583899Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.583937Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.583946Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.583966Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.583977Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.583985Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.584005Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.584015Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.584041Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.584063Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.584070Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.584076Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.584087Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.584092Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.584097Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.584121Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-07-08T11:59:20.584340Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-07-08T11:59:20.584370Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:59:20.584549Z 
node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435081, Sender [2:517:2403], Recipient [2:517:2403]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-07-08T11:59:20.584556Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-07-08T11:59:20.584745Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:20.584755Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:20.584773Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:59:20.584782Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:20.584788Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:59:20.584791Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-07-08T11:59:20.584822Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [2:533:2403], Recipient [2:517:2403]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-07-08T11:59:20.584826Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-07-08T11:59:20.584830Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.595185Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:161:2158], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-07-08T11:59:20.595225Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:161:2158], cacheItem# { Subscriber: { Subscriber: [2:383:2320] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 5000002 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-07-08T11:59:20.595282Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:545:2420], recipient# [2:544:2419], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0 TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } { Path: Root/USER_0 TableId: [72057594046678944:2:0] 
RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-07-08T11:59:20.595329Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:161:2158], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-07-08T11:59:20.595339Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:161:2158], cacheItem# { Subscriber: { Subscriber: [2:392:2323] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 200 PathId: [OwnerId: 72057594046678944, LocalPathId: 3] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/USER_0/DirA TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-07-08T11:59:20.595371Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:547:2422], recipient# [2:546:2421], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-07-08T11:59:20.595422Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:161:2158], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA/Table1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-07-08T11:59:20.595432Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:161:2158], cacheItem# { Subscriber: { Subscriber: [2:401:2326] DomainOwnerId: 72057594046678944 
Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 250 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: Root/USER_0/DirA/Table1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-07-08T11:59:20.595449Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:549:2424], recipient# [2:548:2423], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA/Table1 TableId: [72057594046678944:4:1] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx2 >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx4 >> TBsProxyFaultToleranceTest::CheckTDiscoverFaultToleranceTestErasureMirror3of4 >> TBsProxyFaultToleranceTest::CheckTRangeFaultToleranceTestErasure4Plus2Block >> TBsProxyFaultToleranceTest::CheckTGetWithRecoverFaultToleranceTestErasureMirror3of4 >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx3 >> TBsProxyFaultToleranceTest::CheckTDiscoverFaultToleranceTestErasureMirror3dc >> TBsProxyFaultToleranceTest::CheckTRangeFaultToleranceTestErasureMirror3of4 >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx5 >> TBsProxyFaultToleranceTest::CheckTPutFaultToleranceTestErasureMirror3of4 >> TBsProxyFaultToleranceTest::CheckTDiscoverFaultToleranceTestErasureMirror3of4 [GOOD] >> TBsProxyFaultToleranceTest::CheckTRangeFaultToleranceTestErasureMirror3dc >> TBsProxyFaultToleranceTest::CheckTDiscoverFaultToleranceTestErasureMirror3dc [GOOD] |65.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTDiscoverFaultToleranceTestErasureMirror3of4 [GOOD] >> TBsProxyFaultToleranceTest::CheckTPutFaultToleranceTestErasure4Plus2Block >> TBsProxyFaultToleranceTest::CheckTPutFaultToleranceTestErasureMirror3of4 [GOOD] |65.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTDiscoverFaultToleranceTestErasureMirror3dc [GOOD] >> test_generator.py::TestTpcdsGenerator::test_s1 [GOOD] |65.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |65.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap >> HullReplWriteSst::Basic [GOOD] |65.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTPutFaultToleranceTestErasureMirror3of4 [GOOD] |65.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... 
results_accumulator.log} |65.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap >> TBsProxyFaultToleranceTest::CheckTRangeFaultToleranceTestErasure4Plus2Block [GOOD] >> TBsProxyFaultToleranceTest::CheckTRangeFaultToleranceTestErasureMirror3of4 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/vdisk/repl/ut/unittest >> HullReplWriteSst::Basic [GOOD] Test command err: commit chunk# 1 {ChunkIdx: 1 Offset: 101232640 Size: 32981748} 749583 commit chunk# 2 {ChunkIdx: 2 Offset: 101208064 Size: 33007444} 750167 commit chunk# 3 {ChunkIdx: 3 Offset: 101208064 Size: 33009556} 750215 commit chunk# 4 {ChunkIdx: 4 Offset: 101208064 Size: 33009644} 750217 commit chunk# 5 {ChunkIdx: 5 Offset: 101224448 Size: 32993276} 749845 commit chunk# 6 {ChunkIdx: 6 Offset: 101220352 Size: 32996796} 749925 commit chunk# 7 {ChunkIdx: 7 Offset: 101212160 Size: 33005552} 750124 commit chunk# 8 {ChunkIdx: 8 Offset: 101220352 Size: 32996664} 749922 commit chunk# 9 {ChunkIdx: 9 Offset: 101216256 Size: 32999216} 749980 commit chunk# 10 {ChunkIdx: 10 Offset: 101208064 Size: 33009644} 750217 commit chunk# 11 {ChunkIdx: 11 Offset: 101216256 Size: 32997852} 749949 commit chunk# 12 {ChunkIdx: 12 Offset: 101224448 Size: 32993276} 749845 commit chunk# 13 {ChunkIdx: 13 Offset: 101208064 Size: 33006344} 750142 commit chunk# 14 {ChunkIdx: 14 Offset: 101224448 Size: 32991560} 749806 commit chunk# 15 {ChunkIdx: 15 Offset: 101236736 Size: 32978360} 749506 commit chunk# 16 {ChunkIdx: 16 Offset: 101208064 Size: 33006300} 750141 commit chunk# 17 {ChunkIdx: 17 Offset: 101216256 Size: 33001460} 750031 commit chunk# 18 {ChunkIdx: 18 Offset: 101232640 Size: 32982980} 749611 commit chunk# 19 {ChunkIdx: 19 Offset: 101240832 Size: 32976864} 749472 commit chunk# 20 {ChunkIdx: 20 Offset: 101220352 Size: 32997016} 749930 commit chunk# 21 {ChunkIdx: 21 Offset: 101220352 Size: 32997368} 749938 commit chunk# 22 {ChunkIdx: 22 Offset: 101228544 Size: 32989184} 749752 commit chunk# 23 {ChunkIdx: 23 Offset: 101203968 Size: 33010392} 750234 commit chunk# 24 {ChunkIdx: 24 Offset: 101216256 Size: 33001460} 750031 commit chunk# 25 {ChunkIdx: 25 Offset: 101220352 Size: 32996444} 749917 commit chunk# 26 {ChunkIdx: 26 Offset: 101224448 Size: 32992440} 749826 commit chunk# 27 {ChunkIdx: 27 Offset: 101228544 Size: 32985180} 749661 commit chunk# 28 {ChunkIdx: 28 Offset: 101236736 Size: 32980736} 749560 commit chunk# 29 {ChunkIdx: 29 Offset: 101220352 Size: 32996312} 749914 commit chunk# 30 {ChunkIdx: 30 Offset: 101224448 Size: 32991428} 749803 commit chunk# 31 {ChunkIdx: 31 Offset: 101212160 Size: 33005552} 750124 commit chunk# 32 {ChunkIdx: 32 Offset: 101208064 Size: 33007004} 750157 commit chunk# 33 {ChunkIdx: 33 Offset: 101253120 Size: 32964588} 749193 commit chunk# 34 {ChunkIdx: 34 Offset: 101240832 Size: 32976820} 749471 commit chunk# 35 {ChunkIdx: 35 Offset: 101212160 Size: 33002516} 750055 commit chunk# 36 {ChunkIdx: 36 Offset: 101236736 Size: 32980956} 749565 commit chunk# 37 {ChunkIdx: 37 Offset: 101228544 Size: 32989184} 749752 commit chunk# 38 {ChunkIdx: 38 Offset: 101212160 Size: 33003396} 750075 commit chunk# 39 {ChunkIdx: 39 Offset: 101212160 Size: 33001460} 750031 commit chunk# 40 {ChunkIdx: 40 Offset: 101228544 Size: 32989184} 749752 commit chunk# 41 {ChunkIdx: 41 Offset: 101236736 Size: 32980076} 749545 commit chunk# 42 {ChunkIdx: 42 Offset: 101212160 Size: 33003880} 750086 commit chunk# 43 {ChunkIdx: 43 Offset: 101212160 Size: 33003572} 750079 commit chunk# 44 {ChunkIdx: 44 Offset: 
101228544 Size: 32987644} 749717 commit chunk# 45 {ChunkIdx: 45 Offset: 101240832 Size: 32976864} 749472 commit chunk# 46 {ChunkIdx: 46 Offset: 101216256 Size: 32997984} 749952 commit chunk# 47 {ChunkIdx: 47 Offset: 101228544 Size: 32989184} 749752 commit chunk# 48 {ChunkIdx: 48 Offset: 101236736 Size: 32977304} 749482 commit chunk# 49 {ChunkIdx: 49 Offset: 101224448 Size: 32993276} 749845 commit chunk# 50 {ChunkIdx: 50 Offset: 101195776 Size: 33018884} 750427 commit chunk# 51 {ChunkIdx: 51 Offset: 101244928 Size: 32971232} 749344 commit chunk# 52 {ChunkIdx: 52 Offset: 101212160 Size: 33001944} 750042 commit chunk# 53 {ChunkIdx: 53 Offset: 101240832 Size: 32976864} 749472 commit chunk# 54 {ChunkIdx: 54 Offset: 101232640 Size: 32984784} 749652 commit chunk# 55 {ChunkIdx: 55 Offset: 101224448 Size: 32991208} 749798 commit chunk# 56 {ChunkIdx: 56 Offset: 101228544 Size: 32986500} 749691 commit chunk# 57 {ChunkIdx: 57 Offset: 101232640 Size: 32981132} 749569 commit chunk# 58 {ChunkIdx: 58 Offset: 101228544 Size: 32989184} 749752 commit chunk# 59 {ChunkIdx: 59 Offset: 101253120 Size: 32960760} 749106 commit chunk# 60 {ChunkIdx: 60 Offset: 101216256 Size: 33001460} 750031 commit chunk# 61 {ChunkIdx: 61 Offset: 101232640 Size: 32981132} 749569 commit chunk# 62 {ChunkIdx: 62 Offset: 101232640 Size: 32985048} 749658 commit chunk# 63 {ChunkIdx: 63 Offset: 101195776 Size: 33021920} 750496 commit chunk# 64 {ChunkIdx: 64 Offset: 101195776 Size: 33018136} 750410 commit chunk# 65 {ChunkIdx: 65 Offset: 101240832 Size: 32976336} 749460 commit chunk# 66 {ChunkIdx: 66 Offset: 101216256 Size: 32998732} 749969 commit chunk# 67 {ChunkIdx: 67 Offset: 101265408 Size: 32952048} 748908 commit chunk# 68 {ChunkIdx: 68 Offset: 101232640 Size: 32985048} 749658 commit chunk# 69 {ChunkIdx: 69 Offset: 101220352 Size: 32993848} 749858 commit chunk# 70 {ChunkIdx: 70 Offset: 101224448 Size: 32989976} 749770 commit chunk# 71 {ChunkIdx: 71 Offset: 101228544 Size: 32985840} 749676 commit chunk# 72 {ChunkIdx: 72 Offset: 101191680 Size: 33026012} 750589 commit chunk# 73 {ChunkIdx: 73 Offset: 101208064 Size: 33006520} 750146 commit chunk# 74 {ChunkIdx: 74 Offset: 101203968 Size: 33012240} 750276 commit chunk# 75 {ChunkIdx: 75 Offset: 101208064 Size: 33005904} 750132 commit chunk# 76 {ChunkIdx: 76 Offset: 101220352 Size: 32997368} 749938 commit chunk# 77 {ChunkIdx: 77 Offset: 101208064 Size: 33007136} 750160 commit chunk# 78 {ChunkIdx: 78 Offset: 101224448 Size: 32990592} 749784 commit chunk# 79 {ChunkIdx: 79 Offset: 101224448 Size: 32992484} 749827 commit chunk# 80 {ChunkIdx: 80 Offset: 101236736 Size: 32980956} 749565 commit chunk# 81 {ChunkIdx: 81 Offset: 101232640 Size: 32981044} 749567 commit chunk# 82 {ChunkIdx: 82 Offset: 101203968 Size: 33013252} 750299 commit chunk# 83 {ChunkIdx: 83 Offset: 101208064 Size: 33009644} 750217 commit chunk# 84 {ChunkIdx: 84 Offset: 101228544 Size: 32985972} 749679 commit chunk# 85 {ChunkIdx: 85 Offset: 101212160 Size: 33005552} 750124 commit chunk# 86 {ChunkIdx: 86 Offset: 101228544 Size: 32987600} 749716 commit chunk# 87 {ChunkIdx: 87 Offset: 101216256 Size: 32998380} 749961 commit chunk# 88 {ChunkIdx: 88 Offset: 101208064 Size: 33009644} 750217 commit chunk# 89 {ChunkIdx: 89 Offset: 101220352 Size: 32995388} 749893 commit chunk# 90 {ChunkIdx: 90 Offset: 101220352 Size: 32993760} 749856 commit chunk# 91 {ChunkIdx: 91 Offset: 101216256 Size: 33000800} 750016 commit chunk# 92 {ChunkIdx: 92 Offset: 101253120 Size: 32962344} 749142 commit chunk# 93 {ChunkIdx: 93 Offset: 101236736 Size: 32979416} 
749530 commit chunk# 94 {ChunkIdx: 94 Offset: 101261312 Size: 32955040} 748976 commit chunk# 95 {ChunkIdx: 95 Offset: 101244928 Size: 32971188} 749343 commit chunk# 96 {ChunkIdx: 96 Offset: 101244928 Size: 32969164} 749297 commit chunk# 97 {ChunkIdx: 97 Offset: 101228544 Size: 32986632} 749694 commit chunk# 98 {ChunkIdx: 98 Offset: 101220352 Size: 32994288} 749868 commit chunk# 99 {ChunkIdx: 99 Offset: 101212160 Size: 33001680} 750036 commit chunk# 100 {ChunkIdx: 100 Offset: 101199872 Size: 33016464} 750372 commit chunk# 101 {ChunkIdx: 101 Offset: 101232640 Size: 32985048} 749658 commit chunk# 102 {ChunkIdx: 102 Offset: 101232640 Size: 32982716} 749605 commit chunk# 103 {ChunkIdx: 103 Offset: 101203968 Size: 33013692} 750309 commit chunk# 104 {ChunkIdx: 104 Offset: 101232640 Size: 32981792} 749584 commit chunk# 105 {ChunkIdx: 105 Offset: 101224448 Size: 32993276} 749845 commit chunk# 106 {ChunkIdx: 106 Offset: 101236736 Size: 32980912} 749564 commit chunk# 107 {ChunkIdx: 107 Offset: 101212160 Size: 33005552} 750124 commit chunk# 108 {ChunkIdx: 108 Offset: 101224448 Size: 32989932} 749769 commit chunk# 109 {ChunkIdx: 109 Offset: 101216256 Size: 33000448} 750008 commit chunk# 110 {ChunkIdx: 110 Offset: 101224448 Size: 32993276} 749845 commit chunk# 111 {ChunkIdx: 111 Offset: 101228544 Size: 32989184} 749752 commit chunk# 112 {ChunkIdx: 112 Offset: 101236736 Size: 32976908} 749473 commit chunk# 113 {ChunkIdx: 113 Offset: 101240832 Size: 32976864} 749472 commit chunk# 114 {ChunkIdx: 114 Offset: 101208064 Size: 33009644} 750217 commit chunk# 115 {ChunkIdx: 115 Offset: 101244928 Size: 32972156} 749365 commit chunk# 116 {ChunkIdx: 116 Offset: 101240832 Size: 32973696} 749400 commit chunk# 117 {ChunkIdx: 117 Offset: 101240832 Size: 32974180} 749411 commit chunk# 118 {ChunkIdx: 118 Offset: 101208064 Size: 33006344} 750142 commit chunk# 119 {ChunkIdx: 119 Offset: 101244928 Size: 32972332} 749369 >> KqpRm::SnapshotSharingByExchanger [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopics_1 [GOOD] >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx5 |65.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |65.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |65.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes >> TBsProxyFaultToleranceTest::CheckTPutFaultToleranceTestErasure4Plus2Block [GOOD] |65.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTRangeFaultToleranceTestErasureMirror3of4 [GOOD] |65.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTRangeFaultToleranceTestErasure4Plus2Block [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopics_2 >> TxUsage::WriteToTopic_Demo_15 [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SnapshotSharingByExchanger [GOOD] Test command err: 2025-07-08T11:59:21.105614Z node 2 :TX_PROXY DEBUG: actor# [2:180:2103] Bootstrap 2025-07-08T11:59:21.135445Z node 2 :TX_PROXY DEBUG: actor# [2:180:2103] Become StateWork (SchemeCache [2:187:2106]) 2025-07-08T11:59:21.135593Z node 1 :TX_PROXY DEBUG: actor# [1:179:2153] Bootstrap 2025-07-08T11:59:21.136903Z node 1 :TX_PROXY DEBUG: actor# [1:179:2153] Become StateWork (SchemeCache [1:190:2159]) 
2025-07-08T11:59:21.154070Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-07-08T11:59:21.156099Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-07-08T11:59:21.156148Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-07-08T11:59:21.156479Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:59:21.156650Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-07-08T11:59:21.157074Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2043} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-07-08T11:59:21.157087Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:498} Handle TEvInterconnect::TEvNodesInfo 2025-07-08T11:59:21.157118Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-07-08T11:59:21.159231Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-07-08T11:59:21.159327Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-07-08T11:59:21.159342Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-07-08T11:59:21.159389Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-07-08T11:59:21.159400Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-07-08T11:59:21.159416Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-07-08T11:59:21.182379Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-07-08T11:59:21.182425Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-07-08T11:59:21.193294Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-07-08T11:59:21.193343Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-07-08T11:59:21.193360Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-07-08T11:59:21.193372Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-07-08T11:59:21.193397Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-07-08T11:59:21.193405Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-07-08T11:59:21.193411Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-07-08T11:59:21.193418Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-07-08T11:59:21.204221Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-07-08T11:59:21.204267Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-07-08T11:59:21.221199Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-07-08T11:59:21.221263Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-07-08T11:59:21.221447Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-07-08T11:59:21.221455Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2174} LoadFinished 2025-07-08T11:59:21.223150Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-07-08T11:59:21.223167Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-07-08T11:59:21.223500Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-07-08T11:59:21.223786Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/43nv/001261/r3tmp/tmpuJKlUU/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2025-07-08T11:59:21.223850Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/43nv/001261/r3tmp/tmpuJKlUU/pdisk_1.dat 2025-07-08T11:59:21.223858Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/43nv/001261/r3tmp/tmpuJKlUU/pdisk_1.dat 2025-07-08T11:59:21.224028Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-07-08T11:59:21.224111Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-07-08T11:59:21.224134Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-07-08T11:59:21.224147Z node 1 :BS_CONTROLLER 
DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-07-08T11:59:21.224176Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-07-08T11:59:21.224199Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-07-08T11:59:21.227719Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-07-08T11:59:21.227786Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-07-08T11:59:21.241476Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-07-08T11:59:21.241748Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-07-08T11:59:21.243741Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-07-08T11:59:21.243874Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2747} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/43nv/001261/r3tmp/tmpuJKlUU/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-07-08T11:59:21.244006Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/43nv/001261/r3tmp/tmpuJKlUU/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/43nv/001261/r3tmp/tmpuJKlUU/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 5094368085157871270 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-07-08T11:59:21.244143Z node 2 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2025-07-08T11:59:21.244180Z node 2 :LOCAL DEBUG: TLocal::Bootstrap 2025-07-08T11:59:21.244185Z node 1 :TENANT_POOL DEBUG: TTenantPool::Bootstrap 2025-07-08T11:59:21.244214Z node 1 :LOCAL DEBUG: TLocal::Bootstrap 2025-07-08T11:59:21.244222Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2025-07-08T11:59:21.244242Z node 1 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-07-08T11:59:21.244255Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2025-07-08T11:59:21.244258Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) Bootstrap 2025-07-08T11:59:21.244261Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-07-08T11:59:21.244267Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Bootstrap 2025-07-08T11:59:21.244320Z node 1 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-07-08T11:59:21.244326Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2025-07-08T11:59:21.244329Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2025-07-08T11:59:21.244343Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:418:2306] 2025-07-08T11:59:21.244378Z node 2 :LOCAL DEBUG: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-07-08T11:59:21.244382Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Bootstrap 2025-07-08T11:59:21.244384Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister 2025-07-08T11:59:21.244390Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:420:2116] 2025-07-08T11:59:21.244832Z node 1 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2025-07-08T11:59:21.244836Z node 1 :TENANT_POOL DEBUG: 
TDomainTenantPool(dc-1) send status update to [1:407:2302] 2025-07-08T11:59:21.244854Z node 2 :TENANT_POOL NOTICE: TDomainTenantPool(dc-1) started tenant /dc-1 2025-07-08T11:59:21.244857Z node 2 :TENANT_POOL DEBUG: TDomainTenantPool(dc-1) send status update to [2:408:2112] 2025-07-08T11:59:21.245026Z node 1 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:418:2306]} 2025-07-08T11:59:21.245035Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-07-08T11:59:21.245041Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2025-07-08T11:59:21.245043Z node 1 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2025-07-08T11:59:21.245128Z node 2 :LOCAL DEBUG: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:420:2116]} 2025-07-08T11:59:21.245172Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-07-08T11:59:21.245176Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar TEvPing - CONNECTED 2025-07-08T11:59:21.245178Z node 2 :LOCAL DEBUG: TLocalNodeRegistrar SendStatusOk 2025-07-08T11:59:21.263501Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-07-08T11:59:21.263522Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-07-08T11:59:21.263527Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-07-08T11:59:21.263533Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-07-08T11:59:21.276521Z node 1 :BS_CONTROLLER DEBUG: {BSC19@console_interaction.cpp:74} Console proposed config response Response# {Status: ReverseCommit ConsoleConfigVersion: 0 YAML: "" } 2025-07-08T11:59:21.308109Z node 1 :TX_PROXY DEBUG: actor# [1:179:2153] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-07-08T11:59:21.309397Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2025-07-08T11:59:21.309476Z node 2 :TX_PROXY DEBUG: actor# [2:180:2103] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-07-08T11:59:21.309916Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-07-08T11:59:21.342895Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1 AvailableSize: 34225520640 TotalSize: 34359738368 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 EnforcedDynamicSlotSize: 34158411776 State: Normal } } 2025-07-08T11:59:21.384228Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1000 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: OpenFileError } } 2025-07-08T11:59:21.425391Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank: 0 AvailableSize: 34158411776 AllocatedSize: 0 StatusFlags: 1 VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Occupancy: 0.00098231827111984276 State: OK Replicated: true DiskSpace: Green } } 2025-07-08T11:59:21.743297Z node 1 :BS_CONTROLLER DEBUG: 
{BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-07-08T11:59:21.743931Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-07-08T11:59:21.743998Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } |65.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpcdsGenerator::test_s1 [GOOD] |65.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |65.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |65.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |65.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTPutFaultToleranceTestErasure4Plus2Block [GOOD] |65.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations |65.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations |65.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations |65.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/cms/ut/ydb-services-cms-ut >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot [GOOD] >> TxUsage::WriteToTopic_Demo_16 |65.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest |65.2%| [LD] {RESULT} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |65.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |65.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD] |65.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |65.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |65.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |65.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut >> BasicUsage::SessionNotDestroyedWhileUserEventHandlingInFlight [GOOD] |65.3%| [LD] {RESULT} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD] Test command err: 2025-07-08T11:59:26.526660Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-07-08T11:59:26.527602Z node 1 :STATISTICS DEBUG: EvClientDestroyed, node id = 1, client id = [1:9:2056], server id = [1:9:2056], tablet id = 2 2025-07-08T11:59:26.527616Z node 1 :STATISTICS DEBUG: Tablet 2 is not local. 
2025-07-08T11:59:26.527632Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2025-07-08T11:59:26.527742Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [1:8:2055], tablet id = 1, status = ERROR 2025-07-08T11:59:26.527749Z node 1 :STATISTICS DEBUG: Tablet 1 is not local. 2025-07-08T11:59:26.527767Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2025-07-08T11:59:26.527770Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-07-08T11:59:26.527777Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [1:11:2058], tablet id = 4, status = ERROR 2025-07-08T11:59:26.527780Z node 1 :STATISTICS DEBUG: Tablet 4 is not local. 2025-07-08T11:59:26.527788Z node 1 :STATISTICS DEBUG: EvClientDestroyed, node id = 1, client id = [1:12:2059], server id = [1:12:2059], tablet id = 5 2025-07-08T11:59:26.527790Z node 1 :STATISTICS DEBUG: Tablet 5 is not local. 2025-07-08T11:59:26.527797Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 6 2025-07-08T11:59:26.527803Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [1:14:2061], tablet id = 7, status = ERROR 2025-07-08T11:59:26.527806Z node 1 :STATISTICS DEBUG: Tablet 7 is not local. 2025-07-08T11:59:26.527811Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [0:0:0], tablet id = 6, status = ERROR 2025-07-08T11:59:26.527814Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-07-08T11:59:26.527817Z node 1 :STATISTICS DEBUG: EvClientDestroyed, node id = 1, client id = [1:15:2062], server id = [1:15:2062], tablet id = 8 2025-07-08T11:59:26.527820Z node 1 :STATISTICS DEBUG: Tablet 8 is not local. 2025-07-08T11:59:26.527823Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 >> BasicUsage::ReadSessionCorrectClose |65.3%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |65.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |65.3%| [TA] $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout >> test.py::test[aggregate-list_after_group-default.txt-Results] [GOOD] |65.3%| [LD] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |65.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout [GOOD] |65.3%| [TA] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> AggregateStatistics::ShouldBePings >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout [GOOD] >> test_generator.py::TestTpcdsGenerator::test_s1_state [GOOD] >> LocalPartition::DiscoveryServiceBadPort [GOOD] >> LocalPartition::DiscoveryServiceBadNodeId >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes [GOOD] >> AggregateStatistics::ShouldBePings [GOOD] >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout [GOOD] Test command err: 2025-07-08T11:59:27.503057Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-07-08T11:59:27.504154Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [1:11:2058], tablet id = 4, status = OK 2025-07-08T11:59:27.504231Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:11:2058], path = { OwnerId: 3 LocalId: 3 } 2025-07-08T11:59:27.504249Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [1:12:2059], tablet id = 5, status = OK 2025-07-08T11:59:27.504255Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:12:2059], path = { OwnerId: 3 LocalId: 3 } 2025-07-08T11:59:27.504263Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [1:8:2055], tablet id = 1, status = OK 2025-07-08T11:59:27.504268Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:8:2055], path = { OwnerId: 3 LocalId: 3 } 2025-07-08T11:59:27.504273Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [1:9:2056], tablet id = 2, status = OK 2025-07-08T11:59:27.504277Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:9:2056], path = { OwnerId: 3 LocalId: 3 } 2025-07-08T11:59:27.504287Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 5 2025-07-08T11:59:27.504309Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [1:10:2057], tablet id = 3, status = OK 2025-07-08T11:59:27.504314Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:10:2057], path = { OwnerId: 3 LocalId: 3 } 2025-07-08T11:59:27.504321Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2025-07-08T11:59:27.504334Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [0:0:0], tablet id = 5, status = ERROR 2025-07-08T11:59:27.504337Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-07-08T11:59:27.504343Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [1:13:2060], tablet id = 6, status = OK 2025-07-08T11:59:27.504348Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:13:2060], path = { OwnerId: 3 LocalId: 3 } 2025-07-08T11:59:27.504353Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2025-07-08T11:59:27.504359Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [0:0:0], tablet id = 1, status = ERROR 2025-07-08T11:59:27.504362Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-07-08T11:59:27.504366Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [1:14:2061], tablet id = 7, status = OK 
2025-07-08T11:59:27.504370Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:14:2061], path = { OwnerId: 3 LocalId: 3 } 2025-07-08T11:59:27.504376Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2025-07-08T11:59:27.504379Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-07-08T11:59:27.504382Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 7 2025-07-08T11:59:27.504388Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [0:0:0], tablet id = 7, status = ERROR 2025-07-08T11:59:27.504392Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-07-08T11:59:27.515602Z node 1 :STATISTICS ERROR: No result was received from the tablet 4 2025-07-08T11:59:27.515623Z node 1 :STATISTICS DEBUG: Tablet 4 is not local. 2025-07-08T11:59:27.515647Z node 1 :STATISTICS DEBUG: Tablet 5 has already been processed 2025-07-08T11:59:27.515652Z node 1 :STATISTICS DEBUG: Tablet 1 has already been processed 2025-07-08T11:59:27.515656Z node 1 :STATISTICS ERROR: No result was received from the tablet 2 2025-07-08T11:59:27.515661Z node 1 :STATISTICS DEBUG: Tablet 2 is not local. 2025-07-08T11:59:27.515667Z node 1 :STATISTICS DEBUG: Tablet 3 has already been processed 2025-07-08T11:59:27.515671Z node 1 :STATISTICS ERROR: No result was received from the tablet 6 2025-07-08T11:59:27.515674Z node 1 :STATISTICS DEBUG: Tablet 6 is not local. 2025-07-08T11:59:27.515677Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-07-08T11:59:27.515694Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-07-08T11:59:27.515697Z node 1 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout 2025-07-08T11:59:27.515710Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [0:0:0], tablet id = 4, status = ERROR 2025-07-08T11:59:27.515714Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-07-08T11:59:27.515719Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [0:0:0], tablet id = 2, status = ERROR 2025-07-08T11:59:27.515721Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-07-08T11:59:27.515726Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [0:0:0], tablet id = 6, status = ERROR 2025-07-08T11:59:27.515728Z node 1 :STATISTICS DEBUG: Skip EvClientConnected ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot [GOOD] Test command err: 2025-07-08T11:58:53.265054Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:58:53.267894Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:58:53.267947Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:58:53.268473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:58:53.268516Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:53.268554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:53.268568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:53.268579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:53.268594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:58:53.268607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:53.268618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:53.268629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:58:53.268640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.268651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:58:53.268664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:58:53.273483Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:58:53.273707Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:58:53.273719Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:58:53.273760Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:53.273806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:58:53.273818Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:58:53.273822Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:58:53.273829Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:58:53.273835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:58:53.273840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:58:53.273843Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:58:53.273866Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:53.273874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:58:53.273881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:58:53.273886Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:58:53.273895Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:58:53.273900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:58:53.273905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:58:53.273908Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:58:53.273914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:58:53.273919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:58:53.273922Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:58:53.273939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-07-08T11:58:53.273944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:58:53.273949Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:58:53.273972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:58:53.273980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:58:53.273984Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:58:53.273992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:58:53.273997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.274000Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.274005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:58:53.274010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:58:53.274014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:58:53.274017Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:58:53.274048Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=9; 2025-07-08T11:58:53.274059Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=8; 2025-07-08T11:58:53.274065Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2025-07-08T11:58:53.274074Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=6; 2025-07-08T11:58:53.274081Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:58:53.274089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:58:53.274094Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:58:53.274098Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:58:53.274106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:58:53.274119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;eve ... _portion=184; 2025-07-08T11:59:25.304470Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:48;memory_size=110;data_size=68;sum=787448;count=14327; 2025-07-08T11:59:25.304478Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:65;memory_size=206;data_size=180;sum=1475192;count=14328;size_of_meta=112; 2025-07-08T11:59:25.304484Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=252;sum=1991000;count=7164;size_of_portion=184; 2025-07-08T11:59:25.304555Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=7800; 2025-07-08T11:59:25.304567Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=2; 2025-07-08T11:59:25.304723Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=148; 2025-07-08T11:59:25.304731Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=7997; 2025-07-08T11:59:25.304735Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=8008; 2025-07-08T11:59:25.304743Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=1; 2025-07-08T11:59:25.304807Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=57; 2025-07-08T11:59:25.304814Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=8134; 2025-07-08T11:59:25.304849Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=26; 2025-07-08T11:59:25.304866Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=11; 2025-07-08T11:59:25.304926Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=54; 2025-07-08T11:59:25.305021Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=88; 2025-07-08T11:59:25.311003Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=5961; 2025-07-08T11:59:25.318568Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=7522; 2025-07-08T11:59:25.318610Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=4; 2025-07-08T11:59:25.318617Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2025-07-08T11:59:25.318625Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=2; 2025-07-08T11:59:25.318641Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=11; 2025-07-08T11:59:25.318649Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=0; 2025-07-08T11:59:25.318667Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=12; 2025-07-08T11:59:25.318673Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-07-08T11:59:25.318688Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=7; 2025-07-08T11:59:25.318706Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=12; 2025-07-08T11:59:25.318721Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=10; 2025-07-08T11:59:25.318726Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=22959; 2025-07-08T11:59:25.318770Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=22538992;raw_bytes=22128150;count=3;records=225200} inactive {blob_bytes=147791880;raw_bytes=143975050;count=221;records=1575200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-07-08T11:59:25.318807Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:9968:11570];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SwitchToWork; 2025-07-08T11:59:25.318817Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:9968:11570];process=SwitchToWork;fline=columnshard.cpp:76;event=initialize_shard;step=SignalTabletActive; 2025-07-08T11:59:25.318833Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];process=SwitchToWork;fline=columnshard_impl.cpp:1327;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-07-08T11:59:25.318842Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];process=SwitchToWork;fline=column_engine_logs.cpp:471;event=OnTieringModified;new_count_tierings=0; 2025-07-08T11:59:25.318879Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:25.318895Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=9; 2025-07-08T11:59:25.318910Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975638817;tx_id=18446744073709551615;;current_snapshot_ts=1751975935092; 2025-07-08T11:59:25.318918Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:25.318927Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:25.318932Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:25.318956Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; 2025-07-08T11:59:25.320503Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:248;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-07-08T11:59:25.320730Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:237;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-07-08T11:59:25.320740Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-07-08T11:59:25.320744Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-07-08T11:59:25.320750Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:25.320769Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=9; 2025-07-08T11:59:25.320779Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975638817;tx_id=18446744073709551615;;current_snapshot_ts=1751975935092; 2025-07-08T11:59:25.320786Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:25.320795Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:25.320800Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:25.320815Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=1.000000s; 2025-07-08T11:59:25.320822Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; |65.3%| [TA] $(B)/ydb/core/blobstorage/vdisk/repl/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes [GOOD] Test command err: 2025-07-08T11:59:27.782080Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-07-08T11:59:27.782286Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK 2025-07-08T11:59:27.782345Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 } 2025-07-08T11:59:27.782392Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-07-08T11:59:27.782408Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:39:2059], server id = [1:39:2059], tablet id = 2, status = OK 2025-07-08T11:59:27.782414Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:39:2059], path = { OwnerId: 3 LocalId: 3 } 2025-07-08T11:59:27.782477Z node 3 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-07-08T11:59:27.782496Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:40:2060], server id = [1:40:2060], tablet id = 3, status = OK 2025-07-08T11:59:27.782501Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:40:2060], path = { OwnerId: 3 LocalId: 3 } 2025-07-08T11:59:27.782526Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:46:2057], server id = [3:46:2057], tablet id = 5, status = OK 2025-07-08T11:59:27.782532Z node 3 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [3:46:2057], path = { OwnerId: 3 LocalId: 3 } 2025-07-08T11:59:27.782539Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:44:2057], server id = [2:44:2057], tablet id = 4, status = OK 2025-07-08T11:59:27.782543Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:44:2057], path = { OwnerId: 3 LocalId: 3 } 2025-07-08T11:59:27.782555Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2025-07-08T11:59:27.782580Z node 3 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 5 2025-07-08T11:59:27.782586Z node 3 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-07-08T11:59:27.782607Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 4 2025-07-08T11:59:27.782615Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 2 2025-07-08T11:59:27.782624Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:46:2057], server id = [0:0:0], tablet id = 5, status = ERROR 2025-07-08T11:59:27.782627Z node 3 :STATISTICS DEBUG: Skip EvClientConnected 2025-07-08T11:59:27.782631Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2025-07-08T11:59:27.782647Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2025-07-08T11:59:27.782662Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:44:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2025-07-08T11:59:27.782665Z node 2 :STATISTICS DEBUG: Skip EvClientConnected 2025-07-08T11:59:27.782668Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR 2025-07-08T11:59:27.782671Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 
2025-07-08T11:59:27.782678Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:39:2059], server id = [0:0:0], tablet id = 2, status = ERROR 2025-07-08T11:59:27.782681Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-07-08T11:59:27.782688Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:49:2057], server id = [4:49:2057], tablet id = 6, status = OK 2025-07-08T11:59:27.782694Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:49:2057], path = { OwnerId: 3 LocalId: 3 } 2025-07-08T11:59:27.782701Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:40:2060], server id = [0:0:0], tablet id = 3, status = ERROR 2025-07-08T11:59:27.782707Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-07-08T11:59:27.782729Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2025-07-08T11:59:27.782739Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 6 2025-07-08T11:59:27.782743Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-07-08T11:59:27.782752Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:49:2057], server id = [0:0:0], tablet id = 6, status = ERROR 2025-07-08T11:59:27.782755Z node 4 :STATISTICS DEBUG: Skip EvClientConnected 2025-07-08T11:59:27.782768Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2025-07-08T11:59:27.782773Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-07-08T11:59:27.782787Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 2 2025-07-08T11:59:27.782792Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBePings [GOOD] Test command err: 2025-07-08T11:59:27.647897Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-07-08T11:59:27.648938Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-07-08T11:59:27.761539Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2025-07-08T11:59:27.761575Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 2 2025-07-08T11:59:27.761582Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-07-08T11:59:27.761759Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:16:2056], server id = [0:0:0], tablet id = 1, status = ERROR 2025-07-08T11:59:27.761765Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-07-08T11:59:27.761774Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:19:2055], server id = [0:0:0], tablet id = 2, status = ERROR 2025-07-08T11:59:27.761776Z node 2 :STATISTICS DEBUG: Skip EvClientConnected 2025-07-08T11:59:27.761786Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 2 2025-07-08T11:59:27.761791Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout [GOOD] Test command err: 2025-07-08T11:59:27.475335Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-07-08T11:59:27.475545Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 
1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK 2025-07-08T11:59:27.475610Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 } 2025-07-08T11:59:27.475663Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-07-08T11:59:27.475691Z node 3 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-07-08T11:59:27.475709Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2025-07-08T11:59:27.475726Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:44:2057], server id = [3:44:2057], tablet id = 3, status = OK 2025-07-08T11:59:27.475733Z node 3 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [3:44:2057], path = { OwnerId: 3 LocalId: 3 } 2025-07-08T11:59:27.475742Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR 2025-07-08T11:59:27.475745Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-07-08T11:59:27.475749Z node 3 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2025-07-08T11:59:27.475753Z node 3 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-07-08T11:59:27.475773Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2025-07-08T11:59:27.475783Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:44:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2025-07-08T11:59:27.475786Z node 3 :STATISTICS DEBUG: Skip EvClientConnected 2025-07-08T11:59:27.475796Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [4:47:2057], tablet id = 4, status = OK 2025-07-08T11:59:27.475801Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:47:2057], path = { OwnerId: 3 LocalId: 3 } 2025-07-08T11:59:27.475814Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2025-07-08T11:59:27.475823Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 4 2025-07-08T11:59:27.475827Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-07-08T11:59:27.475837Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2025-07-08T11:59:27.475840Z node 4 :STATISTICS DEBUG: Skip EvClientConnected 2025-07-08T11:59:27.475852Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2025-07-08T11:59:27.489080Z node 3 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-07-08T11:59:27.489103Z node 3 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-07-08T11:59:27.489126Z node 4 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-07-08T11:59:27.489131Z node 4 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-07-08T11:59:27.501044Z node 2 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout 2025-07-08T11:59:27.501073Z node 1 :STATISTICS INFO: Node 2 is unavailable 2025-07-08T11:59:27.501080Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-07-08T11:59:27.501100Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-07-08T11:59:27.501104Z node 1 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout 2025-07-08T11:59:27.501112Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-07-08T11:59:27.501116Z 
node 1 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-07-08T11:59:27.501143Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-07-08T11:59:27.501147Z node 1 :STATISTICS DEBUG: Skip TEvAggregateKeepAlive ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout [GOOD] Test command err: 2025-07-08T11:59:28.099106Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-07-08T11:59:28.099320Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK 2025-07-08T11:59:28.099388Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 } 2025-07-08T11:59:28.099441Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-07-08T11:59:28.099470Z node 3 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-07-08T11:59:28.099500Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2025-07-08T11:59:28.099520Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:44:2057], server id = [3:44:2057], tablet id = 3, status = OK 2025-07-08T11:59:28.099526Z node 3 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [3:44:2057], path = { OwnerId: 3 LocalId: 3 } 2025-07-08T11:59:28.099535Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR 2025-07-08T11:59:28.099539Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-07-08T11:59:28.099543Z node 3 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2025-07-08T11:59:28.099547Z node 3 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-07-08T11:59:28.099567Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2025-07-08T11:59:28.099577Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:44:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2025-07-08T11:59:28.099580Z node 3 :STATISTICS DEBUG: Skip EvClientConnected 2025-07-08T11:59:28.099590Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [4:47:2057], tablet id = 4, status = OK 2025-07-08T11:59:28.099596Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:47:2057], path = { OwnerId: 3 LocalId: 3 } 2025-07-08T11:59:28.099609Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2025-07-08T11:59:28.099618Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 4 2025-07-08T11:59:28.099622Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-07-08T11:59:28.099631Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2025-07-08T11:59:28.099634Z node 4 :STATISTICS DEBUG: Skip EvClientConnected 2025-07-08T11:59:28.099644Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2025-07-08T11:59:28.110264Z node 3 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-07-08T11:59:28.110289Z node 3 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-07-08T11:59:28.110315Z node 4 :STATISTICS DEBUG: Event round 1 is different from the current 0 
2025-07-08T11:59:28.110320Z node 4 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-07-08T11:59:28.121484Z node 2 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout 2025-07-08T11:59:28.121521Z node 1 :STATISTICS INFO: Node 2 is unavailable 2025-07-08T11:59:28.121528Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-07-08T11:59:28.121555Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-07-08T11:59:28.121558Z node 1 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout 2025-07-08T11:59:28.121569Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-07-08T11:59:28.121573Z node 1 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-07-08T11:59:28.121609Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-07-08T11:59:28.121613Z node 1 :STATISTICS DEBUG: Skip TEvAggregateKeepAlive |65.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 [GOOD] |65.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpcdsGenerator::test_s1_state [GOOD] >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx0 >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp [GOOD] |65.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |65.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |65.4%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/repl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |65.4%| [LD] {RESULT} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |65.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/ut_aggregation/unittest >> TBsProxyFaultToleranceTest::CheckTPutFaultToleranceTestErasureMirror3dc ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 [GOOD] Test command err: 2025-07-08T11:59:05.142727Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:59:05.146550Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:59:05.146611Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:59:05.147272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:59:05.147340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:59:05.147377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:59:05.147398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 
2025-07-08T11:59:05.147413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:59:05.147431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:59:05.147448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:59:05.147467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:59:05.147484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:59:05.147501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:59:05.147519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:59:05.147534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:59:05.154683Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:59:05.154926Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:59:05.154942Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:59:05.154972Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:05.155016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:59:05.155031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:59:05.155037Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:59:05.155046Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:59:05.155055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 
2025-07-08T11:59:05.155062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:59:05.155066Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:59:05.155089Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:05.155098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:59:05.155105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:59:05.155109Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:59:05.155118Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:59:05.155137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:59:05.155146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:59:05.155150Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:59:05.155158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:59:05.155165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:59:05.155169Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:59:05.155194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:59:05.155202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:59:05.155206Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:59:05.155236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:59:05.155243Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:59:05.155247Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:59:05.155261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:59:05.155268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:59:05.155272Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:59:05.155280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:59:05.155287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:59:05.155296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:59:05.155301Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:59:05.155346Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=12; 2025-07-08T11:59:05.155366Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=15; 2025-07-08T11:59:05.155375Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=5; 2025-07-08T11:59:05.155388Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=9; 2025-07-08T11:59:05.155399Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:59:05.155411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:59:05.155419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:59:05.155425Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:59:05.155438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:59:05.155444Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;e ... nge:[NO_BLOB:0:8384];;column_id:10;chunk_idx:4;blob_range:[NO_BLOB:0:8384];;column_id:10;chunk_idx:5;blob_range:[NO_BLOB:0:8384];;column_id:10;chunk_idx:6;blob_range:[NO_BLOB:0:8384];;column_id:10;chunk_idx:7;blob_range:[NO_BLOB:0:8384];;column_id:10;chunk_idx:8;blob_range:[NO_BLOB:0:8384];;column_id:10;chunk_idx:9;blob_range:[NO_BLOB:0:8656];;column_id:10;chunk_idx:10;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:11;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:12;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:13;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:14;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:15;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:16;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:17;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:18;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:19;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:20;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:21;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:22;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:23;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:24;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:25;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:26;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:27;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:28;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:29;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:30;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:31;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:32;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:33;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:34;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:35;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:36;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:37;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:38;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:39;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:40;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:41;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:42;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:43;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:44;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:45;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:46;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:47;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:48;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:49;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:50;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:51;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:52;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:53;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:54;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:55;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:56;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:57;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:58;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:59;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:60;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:61;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:62;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:63;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:64;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:65;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:66;blob_range:[NO_BLOB:0:94
24];;column_id:10;chunk_idx:67;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:68;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:69;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:70;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:71;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:72;blob_range:[NO_BLOB:0:9424];;;;switched=(portion_id:220;path_id:9438184000001;records_count:75000;schema_version:1;level:1;;column_size:7504840;index_size:0;meta:(()););(portion_id:218;path_id:9438184000001;records_count:75000;schema_version:1;level:2;;column_size:7503120;index_size:0;meta:(()););(portion_id:221;path_id:9438184000001;records_count:75000;schema_version:1;level:1;;column_size:7504840;index_size:0;meta:(()););; 2025-07-08T11:59:27.680819Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;event=on_execution;consumer=GENERAL_COMPACTION;task_id=fe5ddb64-5bf211f0-86eee84a-ab241cb6;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=fe5ddb64-5bf211f0-86eee84a-ab241cb6;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=9437184;parent_id=[1:5961:7955];task_id=fe5ddb64-5bf211f0-86eee84a-ab241cb6;task_class=CS::GENERAL;fline=general_compaction.cpp:140;event=blobs_created;appended=1;switched=3; 2025-07-08T11:59:27.680834Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;event=on_execution;consumer=GENERAL_COMPACTION;task_id=fe5ddb64-5bf211f0-86eee84a-ab241cb6;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=fe5ddb64-5bf211f0-86eee84a-ab241cb6;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=9437184;parent_id=[1:5961:7955];task_id=fe5ddb64-5bf211f0-86eee84a-ab241cb6;task_class=CS::GENERAL;fline=abstract.cpp:13;event=new_stage;stage=Constructed;task_id=fe5ddb64-5bf211f0-86eee84a-ab241cb6; 2025-07-08T11:59:27.687973Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5961:7955];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:52;event=TEvWriteIndex;count=1; 2025-07-08T11:59:27.689087Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5961:7955];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:59;event=TTxWriteDraft; 2025-07-08T11:59:27.689103Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5961:7955];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=WriteDraft;task_id=fe5ddb64-5bf211f0-86eee84a-ab241cb6; 2025-07-08T11:59:27.784406Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: fline=tx_draft.cpp:16;event=draft_completed; 2025-07-08T11:59:27.784455Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=7503120;count=812;actions=__MEMORY,__DEFAULT,;waiting=2;; 2025-07-08T11:59:28.084349Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-07-08T11:59:28.084431Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5961:7955];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=common_level.h:121;from=0,0,0,0,;to=74999,74999,74999,74999,; 2025-07-08T11:59:28.084444Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5961:7955];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=common_level.h:141;itFrom=1;itTo=1;raw=7369450;count=1;packed=7504840; 2025-07-08T11:59:28.084470Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:5961:7955];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=95658;count=1749; 2025-07-08T11:59:28.084480Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5961:7955];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:65;memory_size=190;data_size=180;sum=179658;count=1750;size_of_meta=112; 2025-07-08T11:59:28.084492Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5961:7955];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_portion.cpp:40;memory_size=262;data_size=252;sum=242658;count=875;size_of_portion=184; 2025-07-08T11:59:28.084586Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5961:7955];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Compiled;task_id=fe5ddb64-5bf211f0-86eee84a-ab241cb6; 2025-07-08T11:59:28.084672Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[2] (CS::GENERAL) apply at tablet 9437184 2025-07-08T11:59:28.117046Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5961:7955];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=fe5ddb64-5bf211f0-86eee84a-ab241cb6;fline=abstract.cpp:13;event=new_stage;stage=Written;task_id=fe5ddb64-5bf211f0-86eee84a-ab241cb6; 2025-07-08T11:59:28.117503Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 666 2025-07-08T11:59:28.118627Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=37548672;raw_bytes=36867050;count=5;records=375200} inactive {blob_bytes=110272840;raw_bytes=107127800;count=216;records=1200200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-07-08T11:59:28.313342Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=fe5ddb64-5bf211f0-86eee84a-ab241cb6;fline=abstract.cpp:13;event=new_stage;stage=Finished;task_id=fe5ddb64-5bf211f0-86eee84a-ab241cb6; 2025-07-08T11:59:28.313363Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=fe5ddb64-5bf211f0-86eee84a-ab241cb6;fline=abstract.cpp:54;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-07-08T11:59:28.313373Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=fe5ddb64-5bf211f0-86eee84a-ab241cb6;fline=with_appended.cpp:65;portions=222,;task_id=fe5ddb64-5bf211f0-86eee84a-ab241cb6; 2025-07-08T11:59:28.313507Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=fe5ddb64-5bf211f0-86eee84a-ab241cb6;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::fe5ddb64-5bf211f0-86eee84a-ab241cb6; 2025-07-08T11:59:28.313529Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=fe5ddb64-5bf211f0-86eee84a-ab241cb6;fline=granule.cpp:97;event=OnCompactionFinished;info=(granule:9438184000001;path_id:9438184000001;size:22538992;portions_count:222;); 2025-07-08T11:59:28.313536Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=fe5ddb64-5bf211f0-86eee84a-ab241cb6;tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:28.313555Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=fe5ddb64-5bf211f0-86eee84a-ab241cb6;tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=9; 2025-07-08T11:59:28.313569Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;task_id=fe5ddb64-5bf211f0-86eee84a-ab241cb6;tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975648676;tx_id=18446744073709551615;;current_snapshot_ts=1751975946755; 2025-07-08T11:59:28.313577Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=fe5ddb64-5bf211f0-86eee84a-ab241cb6;tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:28.313585Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=fe5ddb64-5bf211f0-86eee84a-ab241cb6;tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:28.313589Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=fe5ddb64-5bf211f0-86eee84a-ab241cb6;tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:28.313619Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=fe5ddb64-5bf211f0-86eee84a-ab241cb6;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.869000s; 2025-07-08T11:59:28.313627Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=fe5ddb64-5bf211f0-86eee84a-ab241cb6;tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; 2025-07-08T11:59:28.313797Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 666 >> TOlap::CreateTableWithNullableKeysNotAllowed >> TOlap::CreateDropStandaloneTable >> TOlap::CreateTableWithNullableKeysNotAllowed [GOOD] >> TOlap::CreateTableWithNullableKeys >> TSchemeShardUserAttrsTest::Boot ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp [GOOD] Test command err: 2025-07-08T11:59:05.952776Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:59:05.956694Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:59:05.956760Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:59:05.957507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:59:05.957568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:59:05.957606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:59:05.957625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:59:05.957642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:59:05.957681Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:59:05.957698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:59:05.957715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:59:05.957731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:59:05.957748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:59:05.957765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:59:05.957782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:59:05.963953Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:59:05.964121Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:59:05.964131Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:59:05.964160Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:05.964202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:59:05.964216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:59:05.964222Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:59:05.964231Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:59:05.964239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:59:05.964246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:59:05.964250Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:59:05.964272Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:05.964279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:59:05.964285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:59:05.964289Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:59:05.964299Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:59:05.964306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:59:05.964313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:59:05.964317Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:59:05.964325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:59:05.964332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:59:05.964336Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:59:05.964359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:59:05.964366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:59:05.964370Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:59:05.964389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:59:05.964396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:59:05.964400Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:59:05.964412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:59:05.964419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:59:05.964422Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:59:05.964430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:59:05.964437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:59:05.964443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:59:05.964447Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:59:05.964485Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=11; 2025-07-08T11:59:05.964498Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=10; 2025-07-08T11:59:05.964507Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2025-07-08T11:59:05.964518Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=8; 2025-07-08T11:59:05.964527Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:59:05.964538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:59:05.964545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:59:05.964550Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:59:05.964563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:59:05.964568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;e ... 
nge:[NO_BLOB:0:8384];;column_id:10;chunk_idx:4;blob_range:[NO_BLOB:0:8384];;column_id:10;chunk_idx:5;blob_range:[NO_BLOB:0:8384];;column_id:10;chunk_idx:6;blob_range:[NO_BLOB:0:8384];;column_id:10;chunk_idx:7;blob_range:[NO_BLOB:0:8384];;column_id:10;chunk_idx:8;blob_range:[NO_BLOB:0:8384];;column_id:10;chunk_idx:9;blob_range:[NO_BLOB:0:8656];;column_id:10;chunk_idx:10;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:11;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:12;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:13;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:14;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:15;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:16;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:17;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:18;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:19;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:20;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:21;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:22;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:23;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:24;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:25;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:26;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:27;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:28;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:29;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:30;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:31;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:32;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:33;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:34;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:35;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:36;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:37;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:38;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:39;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:40;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:41;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:42;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:43;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:44;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:45;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:46;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:47;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:48;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:49;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:50;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:51;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:52;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:53;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:54;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:55;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:56;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:57;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:58;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:59;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:60;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:61;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:62;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:63;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:64;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:65;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:66;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:67;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_
idx:68;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:69;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:70;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:71;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:72;blob_range:[NO_BLOB:0:9424];;;;switched=(portion_id:220;path_id:9438184000001;records_count:75000;schema_version:1;level:1;;column_size:7504840;index_size:0;meta:(()););(portion_id:218;path_id:9438184000001;records_count:75000;schema_version:1;level:2;;column_size:7503120;index_size:0;meta:(()););(portion_id:221;path_id:9438184000001;records_count:75000;schema_version:1;level:1;;column_size:7504840;index_size:0;meta:(()););; 2025-07-08T11:59:28.325237Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;event=on_execution;consumer=GENERAL_COMPACTION;task_id=ff03f652-5bf211f0-99cb3ad6-6366f174;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=ff03f652-5bf211f0-99cb3ad6-6366f174;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=9437184;parent_id=[1:5961:7955];task_id=ff03f652-5bf211f0-99cb3ad6-6366f174;task_class=CS::GENERAL;fline=general_compaction.cpp:140;event=blobs_created;appended=1;switched=3; 2025-07-08T11:59:28.325254Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;event=on_execution;consumer=GENERAL_COMPACTION;task_id=ff03f652-5bf211f0-99cb3ad6-6366f174;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=ff03f652-5bf211f0-99cb3ad6-6366f174;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=9437184;parent_id=[1:5961:7955];task_id=ff03f652-5bf211f0-99cb3ad6-6366f174;task_class=CS::GENERAL;fline=abstract.cpp:13;event=new_stage;stage=Constructed;task_id=ff03f652-5bf211f0-99cb3ad6-6366f174; 2025-07-08T11:59:28.325930Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5961:7955];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:52;event=TEvWriteIndex;count=1; 2025-07-08T11:59:28.327004Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5961:7955];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:59;event=TTxWriteDraft; 2025-07-08T11:59:28.327015Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5961:7955];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=WriteDraft;task_id=ff03f652-5bf211f0-99cb3ad6-6366f174; 2025-07-08T11:59:28.415551Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: fline=tx_draft.cpp:16;event=draft_completed; 2025-07-08T11:59:28.415598Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=7503120;count=812;actions=__MEMORY,__DEFAULT,;waiting=2;; 2025-07-08T11:59:28.665587Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-07-08T11:59:28.665645Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5961:7955];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=common_level.h:121;from=0,0,0,0,;to=74999,74999,74999,74999,; 2025-07-08T11:59:28.665659Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5961:7955];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=common_level.h:141;itFrom=1;itTo=1;raw=7369450;count=1;packed=7504840; 2025-07-08T11:59:28.665681Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:5961:7955];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=95658;count=1749; 2025-07-08T11:59:28.665690Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5961:7955];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:65;memory_size=190;data_size=180;sum=179658;count=1750;size_of_meta=112; 2025-07-08T11:59:28.665702Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5961:7955];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_portion.cpp:40;memory_size=262;data_size=252;sum=242658;count=875;size_of_portion=184; 2025-07-08T11:59:28.665797Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5961:7955];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Compiled;task_id=ff03f652-5bf211f0-99cb3ad6-6366f174; 2025-07-08T11:59:28.665852Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[2] (CS::GENERAL) apply at tablet 9437184 2025-07-08T11:59:28.732366Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5961:7955];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=ff03f652-5bf211f0-99cb3ad6-6366f174;fline=abstract.cpp:13;event=new_stage;stage=Written;task_id=ff03f652-5bf211f0-99cb3ad6-6366f174; 2025-07-08T11:59:28.732738Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 666 2025-07-08T11:59:28.733729Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=37548672;raw_bytes=36867050;count=5;records=375200} inactive {blob_bytes=110272840;raw_bytes=107127800;count=216;records=1200200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-07-08T11:59:28.954018Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ff03f652-5bf211f0-99cb3ad6-6366f174;fline=abstract.cpp:13;event=new_stage;stage=Finished;task_id=ff03f652-5bf211f0-99cb3ad6-6366f174; 2025-07-08T11:59:28.954039Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ff03f652-5bf211f0-99cb3ad6-6366f174;fline=abstract.cpp:54;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-07-08T11:59:28.954050Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ff03f652-5bf211f0-99cb3ad6-6366f174;fline=with_appended.cpp:65;portions=222,;task_id=ff03f652-5bf211f0-99cb3ad6-6366f174; 2025-07-08T11:59:28.954161Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ff03f652-5bf211f0-99cb3ad6-6366f174;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::ff03f652-5bf211f0-99cb3ad6-6366f174; 2025-07-08T11:59:28.954175Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ff03f652-5bf211f0-99cb3ad6-6366f174;fline=granule.cpp:97;event=OnCompactionFinished;info=(granule:9438184000001;path_id:9438184000001;size:22538992;portions_count:222;); 2025-07-08T11:59:28.954182Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ff03f652-5bf211f0-99cb3ad6-6366f174;tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:28.954200Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ff03f652-5bf211f0-99cb3ad6-6366f174;tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=9; 2025-07-08T11:59:28.954211Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;task_id=ff03f652-5bf211f0-99cb3ad6-6366f174;tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975649486;tx_id=18446744073709551615;;current_snapshot_ts=1751975947565; 2025-07-08T11:59:28.954219Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ff03f652-5bf211f0-99cb3ad6-6366f174;tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:28.954227Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ff03f652-5bf211f0-99cb3ad6-6366f174;tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:28.954232Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ff03f652-5bf211f0-99cb3ad6-6366f174;tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:28.954247Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ff03f652-5bf211f0-99cb3ad6-6366f174;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.869000s; 2025-07-08T11:59:28.954256Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ff03f652-5bf211f0-99cb3ad6-6366f174;tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; 2025-07-08T11:59:28.954290Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 666 >> TSchemeShardUserAttrsTest::VariousUse >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps |65.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling |65.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling |65.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling |65.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots >> TSchemeShardUserAttrsTest::Boot [GOOD] >> TOlap::CreateTableWithNullableKeys [GOOD] >> DataShardVolatile::DistributedWrite |65.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |65.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |65.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TOlap::CreateDropStandaloneTable [GOOD] >> TOlap::AlterStore >> TSchemeShardUserAttrsTest::VariousUse [GOOD] |65.4%| [TA] $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::Boot [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:59:30.774599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:59:30.774624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:30.774629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:59:30.774634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:59:30.774640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:59:30.774644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:59:30.774656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:30.774669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:59:30.774740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:30.788141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:30.788162Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:30.791998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:30.792061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:59:30.792088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:59:30.793639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:59:30.793682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:59:30.793772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:30.793965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:59:30.794821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:30.794855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:59:30.795070Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:30.795078Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:30.795094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:59:30.795100Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:30.795106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:59:30.795130Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:59:30.796265Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:59:30.815372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:30.815448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:30.815505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:59:30.815556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:59:30.815565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:30.816328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:30.816351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:59:30.816393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:30.816401Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:59:30.816406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:59:30.816411Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:59:30.816742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:30.816750Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:30.816755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:59:30.817045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:30.817054Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2025-07-08T11:59:30.817059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:30.817065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:59:30.817624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:59:30.817966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:59:30.818004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:59:30.818167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:30.818185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:30.818194Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:30.818255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:59:30.818261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:30.818286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:59:30.818297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:59:30.818676Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:30.818682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:30.818723Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:30.818728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:59:30.818737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:30.818743Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:59:30.818754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:30.818758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 
ready parts: 1/1 2025-07-08T11:59:30.818763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:30.818766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:30.818770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:59:30.818775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:30.818780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:59:30.818784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:59:30.818792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:59:30.818797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:59:30.818802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:59:30.819150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:59:30.819163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:59:30.819168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-07-08T11:59:30.819173Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-07-08T11:59:30.819181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:59:30.819193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-07-08T11:59:30.819762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-07-08T11:59:30.819846Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateTableWithNullableKeys [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:59:30.136321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:59:30.136345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:30.136353Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:59:30.136359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:59:30.136376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:59:30.136379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:59:30.136391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:30.136404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:59:30.136478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:30.151295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:30.151317Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:30.155038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:30.155092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:59:30.155124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:59:30.156277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:59:30.156317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:59:30.156423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:30.156573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:59:30.157235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:30.157270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:59:30.157476Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:30.157484Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:30.157499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:59:30.157506Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:30.157511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:59:30.157536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:59:30.158620Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:59:30.175747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools 
{ Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:30.175828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:30.175893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:59:30.175932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:59:30.175941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:30.176631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:30.176654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:59:30.176696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:30.176704Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:59:30.176709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:59:30.176716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:59:30.177004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:30.177013Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:30.177017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:59:30.177263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:30.177269Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:30.177275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:30.177281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:59:30.177800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:59:30.178073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:59:30.178111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan 
to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:59:30.178281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:30.178301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:30.178307Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:30.178370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:59:30.178376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:30.178404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:59:30.178416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:59:30.178745Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:30.178751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:30.178789Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:30.178794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:59:30.178803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:30.178810Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:59:30.178821Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:30.178825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:30.178830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:30.178833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:30.178837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:59:30.178842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:30.178846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:59:30.178850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:59:30.178859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:59:30.178864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:59:30.178869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, 
[OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:59:30.179206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:59:30.179217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... h id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-07-08T11:59:30.855563Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:30.855567Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-07-08T11:59:30.855572Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-07-08T11:59:30.855576Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 4 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 2025-07-08T11:59:30.855716Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-07-08T11:59:30.855722Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropColumnTable TProposedWaitParts operationId# 104:0 ProgressState at schemeshard: 72057594046678944 2025-07-08T11:59:30.855729Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TDropColumnTable TProposedWaitParts operationId# 104:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409547 2025-07-08T11:59:30.855829Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-07-08T11:59:30.855841Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-07-08T11:59:30.855846Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-07-08T11:59:30.855851Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-07-08T11:59:30.855856Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-07-08T11:59:30.855925Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 104 2025-07-08T11:59:30.855933Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 104 2025-07-08T11:59:30.855937Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-07-08T11:59:30.855941Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 
72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-07-08T11:59:30.855944Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T11:59:30.856234Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-07-08T11:59:30.856247Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-07-08T11:59:30.856251Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-07-08T11:59:30.856255Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2025-07-08T11:59:30.856260Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T11:59:30.856272Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-07-08T11:59:30.856457Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 275382275 2025-07-08T11:59:30.856775Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-07-08T11:59:30.856843Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-07-08T11:59:30.857015Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-07-08T11:59:30.869775Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409547 TxId: 104 2025-07-08T11:59:30.869800Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409547, partId: 0 2025-07-08T11:59:30.869821Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409547 TxId: 104 2025-07-08T11:59:30.869836Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 130 FAKE_COORDINATOR: Erasing txId 104 2025-07-08T11:59:30.870269Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-07-08T11:59:30.870311Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-07-08T11:59:30.870318Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropColumnTable TProposedDeleteParts operationId# 104:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:30.870339Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-07-08T11:59:30.870363Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-07-08T11:59:30.870368Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-07-08T11:59:30.870373Z 
node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-07-08T11:59:30.870376Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-07-08T11:59:30.870382Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-07-08T11:59:30.870395Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:357:2335] message: TxId: 104 2025-07-08T11:59:30.870402Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-07-08T11:59:30.870407Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-07-08T11:59:30.870412Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-07-08T11:59:30.870439Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-07-08T11:59:30.870947Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-07-08T11:59:30.870989Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-07-08T11:59:30.870995Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [2:615:2573] 2025-07-08T11:59:30.871083Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-07-08T11:59:30.925633Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[2:460:2428];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:863;event=tablet_die; 2025-07-08T11:59:30.926389Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-07-08T11:59:30.926596Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 Forgetting tablet 72075186233409547 2025-07-08T11:59:30.926742Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T11:59:30.926748Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-07-08T11:59:30.926761Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-07-08T11:59:30.940526Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-07-08T11:59:30.940558Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-07-08T11:59:30.942077Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 2025-07-08T11:59:30.942227Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyDir/MyTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2025-07-08T11:59:30.942282Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyDir/MyTable" took 70us result status StatusPathDoesNotExist 2025-07-08T11:59:30.942341Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyDir/MyTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/MyDir\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/MyDir/MyTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/MyDir" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "MyDir" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-07-08T11:59:30.942422Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 SchemeshardId: 72057594046678944 Options { }, at schemeshard: 72057594046678944 2025-07-08T11:59:30.942435Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 4 took 12us result status StatusPathDoesNotExist 2025-07-08T11:59:30.942443Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'\', error: path is empty, source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "" PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |65.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TOlap::AlterStore [GOOD] >> TOlap::AlterTtl >> TGRpcCmsTest::RemoveWithAnotherTokenTest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::VariousUse [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:59:31.001589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:59:31.001613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:31.001618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:59:31.001623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:59:31.001629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:59:31.001634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:59:31.001647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:31.001661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:59:31.001730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:31.036348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:31.036371Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:31.040703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:31.040770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:59:31.040801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:59:31.046184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:59:31.046232Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:59:31.046333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:31.046481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:59:31.047230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:31.047264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:59:31.047478Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:31.047487Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:31.047504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:59:31.047511Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:31.047517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:59:31.047556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:59:31.053171Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:59:31.094985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:31.095058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:31.095119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:59:31.095161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:59:31.095171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:31.101295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:31.101332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:59:31.101387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:31.101398Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:59:31.101403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:59:31.101409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:59:31.102000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:31.102011Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:31.102016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:59:31.102330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:31.102339Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 
72057594046678944 2025-07-08T11:59:31.102345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:31.102353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:59:31.103012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:59:31.103441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:59:31.103480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:59:31.103682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:31.103704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:31.103714Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:31.103780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:59:31.103788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:31.103817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:59:31.103829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:59:31.104209Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:31.104216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:31.104254Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:31.104260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:59:31.104269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:31.104276Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:59:31.104288Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:31.104292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:31.104297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:31.104301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:31.104306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:59:31.104311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:31.104316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:59:31.104320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:59:31.104330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:59:31.104336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:59:31.104340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:59:31.104724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:59:31.104737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 1/1 2025-07-08T11:59:31.182443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-07-08T11:59:31.182451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:59:31.182458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-07-08T11:59:31.182464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-07-08T11:59:31.182469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 1/1, is published: false 2025-07-08T11:59:31.182487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-07-08T11:59:31.182491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 112:0 2025-07-08T11:59:31.182495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 112:0 2025-07-08T11:59:31.182502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-07-08T11:59:31.182510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 112, publications: 3, subscribers: 0 2025-07-08T11:59:31.182514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-07-08T11:59:31.182517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 3], 7 2025-07-08T11:59:31.182521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-07-08T11:59:31.182648Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-07-08T11:59:31.182912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-07-08T11:59:31.183083Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:31.183090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:31.183112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T11:59:31.183121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-07-08T11:59:31.183142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:31.183147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 112, path id: 1 2025-07-08T11:59:31.183154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 112, path id: 3 2025-07-08T11:59:31.183158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 112, path id: 4 FAKE_COORDINATOR: Erasing txId 112 2025-07-08T11:59:31.183271Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 112 2025-07-08T11:59:31.183280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 112 2025-07-08T11:59:31.183284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 112 2025-07-08T11:59:31.183289Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-07-08T11:59:31.183293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T11:59:31.183340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 112 2025-07-08T11:59:31.183347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 112 2025-07-08T11:59:31.183351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 112 2025-07-08T11:59:31.183355Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2025-07-08T11:59:31.183359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason 
remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T11:59:31.183462Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-07-08T11:59:31.183471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-07-08T11:59:31.183475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 112 2025-07-08T11:59:31.183479Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-07-08T11:59:31.183483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-07-08T11:59:31.183494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 112, subscribers: 0 2025-07-08T11:59:31.183514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T11:59:31.183519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-07-08T11:59:31.183541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-07-08T11:59:31.183833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-07-08T11:59:31.184027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-07-08T11:59:31.184041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-07-08T11:59:31.184244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2025-07-08T11:59:31.184306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2025-07-08T11:59:31.184311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2025-07-08T11:59:31.184388Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2025-07-08T11:59:31.184401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2025-07-08T11:59:31.184406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:501:2492] TestWaitNotification: OK eventTxId 112 2025-07-08T11:59:31.184492Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 
72057594046678944 2025-07-08T11:59:31.184510Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 26us result status StatusSuccess 2025-07-08T11:59:31.184563Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000008 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrB1" Value: "ValB1" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 113 2025-07-08T11:59:31.185141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "DirB" } ApplyIf { PathId: 2 PathVersion: 8 } ApplyIf { PathId: 3 PathVersion: 7 } ApplyIf { PathId: 4 PathVersion: 3 } } TxId: 113 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:31.185162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TRmDir Propose, path: /MyRoot/DirB, pathId: 0, opId: 113:0, at schemeshard: 72057594046678944 2025-07-08T11:59:31.185174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 113:1, propose status:StatusPreconditionFailed, reason: fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-07-08T11:59:31.185555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 113, response: Status: StatusPreconditionFailed Reason: "fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 4]" TxId: 113 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:31.185574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 113, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 4], operation: DROP DIRECTORY, path: /MyRoot/DirB TestModificationResult got TxId: 113, wait until txId: 113 |65.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 
72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:59:31.221058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:59:31.221082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:31.221087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:59:31.221091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:59:31.221097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:59:31.221100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:59:31.221114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:31.221126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:59:31.221196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:31.253171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:31.253192Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:31.267821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:31.267867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:59:31.267907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:59:31.281083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:59:31.281141Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:59:31.281240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:31.281382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:59:31.282220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:31.282254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:59:31.282470Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:31.282477Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:31.282491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:59:31.282497Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:31.282502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Complete 2025-07-08T11:59:31.282523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:59:31.283600Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:59:31.353432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:31.353507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:31.353562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:59:31.353600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:59:31.353609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:31.355519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:31.355554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:59:31.355599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:31.355607Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:59:31.355611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:59:31.355616Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:59:31.356198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:31.356207Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:31.356211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:59:31.356446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:31.356452Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:31.356459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:31.356465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:59:31.357033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } 
ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:59:31.369164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:59:31.369212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:59:31.369423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:31.369460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:31.369468Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:31.369540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:59:31.369548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:31.369581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:59:31.369593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:59:31.370011Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:31.370017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:31.370059Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:31.370064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:59:31.370072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:31.370078Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:59:31.370089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:31.370094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:31.370098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:31.370101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:31.370104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:59:31.370109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready 
parts: 1/1 2025-07-08T11:59:31.370115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:59:31.370119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:59:31.370129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:59:31.370134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:59:31.370138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:59:31.370523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:59:31.370534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... D DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 105 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000005 2025-07-08T11:59:31.445475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:31.445495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:31.445501Z node 1 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 105:0, step: 5000005, at schemeshard: 72057594046678944 2025-07-08T11:59:31.445525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 105:0, at schemeshard: 72057594046678944 2025-07-08T11:59:31.445535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-07-08T11:59:31.445539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-07-08T11:59:31.445544Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-07-08T11:59:31.445546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-07-08T11:59:31.445556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T11:59:31.445563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-07-08T11:59:31.445569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-07-08T11:59:31.445575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-07-08T11:59:31.445579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-07-08T11:59:31.445582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-07-08T11:59:31.445590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount 
reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-07-08T11:59:31.445595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 2, subscribers: 0 2025-07-08T11:59:31.445599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-07-08T11:59:31.445602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-07-08T11:59:31.445845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-07-08T11:59:31.445947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-07-08T11:59:31.446102Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:31.446108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:31.446134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-07-08T11:59:31.446157Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:31.446161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 105, path id: 1 2025-07-08T11:59:31.446166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 105, path id: 4 FAKE_COORDINATOR: Erasing txId 105 2025-07-08T11:59:31.446252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2025-07-08T11:59:31.446260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2025-07-08T11:59:31.446263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-07-08T11:59:31.446267Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-07-08T11:59:31.446271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-07-08T11:59:31.446323Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-07-08T11:59:31.446329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-07-08T11:59:31.446332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-07-08T11:59:31.446335Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-07-08T11:59:31.446338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-07-08T11:59:31.446346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-07-08T11:59:31.446391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T11:59:31.446395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-07-08T11:59:31.446402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T11:59:31.446651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-07-08T11:59:31.446832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-07-08T11:59:31.446847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-07-08T11:59:31.446895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-07-08T11:59:31.446901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-07-08T11:59:31.446969Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-07-08T11:59:31.446981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-07-08T11:59:31.446988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:405:2396] TestWaitNotification: OK eventTxId 105 2025-07-08T11:59:31.447057Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirC" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:59:31.447077Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirC" took 29us result status StatusPathDoesNotExist 2025-07-08T11:59:31.447114Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirC\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/DirC" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" 
EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-07-08T11:59:31.447160Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T11:59:31.447173Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 14us result status StatusSuccess 2025-07-08T11:59:31.447240Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TOlap::AlterTtl [GOOD] |65.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertBatch ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 [GOOD] Test command err: 2025-07-08T11:59:09.878124Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:59:09.882733Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:59:09.882780Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:59:09.883527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:59:09.883589Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:59:09.883624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:59:09.883647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:59:09.883665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:59:09.883686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:59:09.883705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:59:09.883724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:59:09.883743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:59:09.883760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:59:09.883778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:59:09.883797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:59:09.890238Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:59:09.890433Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:59:09.890443Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:59:09.890471Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:09.890514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:59:09.890529Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:59:09.890535Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:59:09.890545Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:59:09.890554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:59:09.890561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:59:09.890566Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:59:09.890588Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:09.890597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:59:09.890604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:59:09.890609Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:59:09.890619Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:59:09.890626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:59:09.890633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:59:09.890638Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:59:09.890647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:59:09.890654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:59:09.890659Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:59:09.890684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-07-08T11:59:09.890692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:59:09.890697Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:59:09.890719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:59:09.890728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:59:09.890732Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:59:09.890746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:59:09.890753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:59:09.890758Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:59:09.890767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:59:09.890775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:59:09.890782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:59:09.890786Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:59:09.890821Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=9; 2025-07-08T11:59:09.890835Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=9; 2025-07-08T11:59:09.890844Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2025-07-08T11:59:09.890855Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=7; 2025-07-08T11:59:09.890866Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:59:09.890878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:59:09.890885Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:59:09.890890Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:59:09.890904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:59:09.890910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;eve ... 50;count=873;size_of_portion=184; 2025-07-08T11:59:31.737718Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:48;memory_size=110;data_size=62;sum=95396;count=1747; 2025-07-08T11:59:31.737727Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:65;memory_size=206;data_size=174;sum=179300;count=1748;size_of_meta=112; 2025-07-08T11:59:31.737732Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=246;sum=242228;count=874;size_of_portion=184; 2025-07-08T11:59:31.737803Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=8874; 2025-07-08T11:59:31.737827Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=1; 2025-07-08T11:59:31.737967Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=132; 2025-07-08T11:59:31.737990Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=9094; 2025-07-08T11:59:31.737996Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=9110; 2025-07-08T11:59:31.738003Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=1; 2025-07-08T11:59:31.738067Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=57; 2025-07-08T11:59:31.738072Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=9246; 2025-07-08T11:59:31.738106Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=22; 2025-07-08T11:59:31.738127Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=16; 2025-07-08T11:59:31.738200Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=66; 2025-07-08T11:59:31.738246Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=37; 2025-07-08T11:59:31.744878Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=6616; 2025-07-08T11:59:31.752436Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=7522; 2025-07-08T11:59:31.752464Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=3; 2025-07-08T11:59:31.752473Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2025-07-08T11:59:31.752480Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=2; 2025-07-08T11:59:31.752499Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=15; 2025-07-08T11:59:31.752505Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-07-08T11:59:31.752524Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=14; 2025-07-08T11:59:31.752531Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=0; 2025-07-08T11:59:31.752544Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=6; 2025-07-08T11:59:31.752559Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=10; 2025-07-08T11:59:31.752574Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=10; 2025-07-08T11:59:31.752579Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=24951; 2025-07-08T11:59:31.752617Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=22744072;raw_bytes=22320020;count=3;records=225200} inactive {blob_bytes=149450960;raw_bytes=145316940;count=221;records=1575200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-07-08T11:59:31.752648Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:6272:8263];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SwitchToWork; 2025-07-08T11:59:31.752656Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:6272:8263];process=SwitchToWork;fline=columnshard.cpp:76;event=initialize_shard;step=SignalTabletActive; 2025-07-08T11:59:31.752670Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:6272:8263];process=SwitchToWork;fline=columnshard_impl.cpp:1327;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-07-08T11:59:31.752678Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:6272:8263];process=SwitchToWork;fline=column_engine_logs.cpp:471;event=OnTieringModified;new_count_tierings=0; 2025-07-08T11:59:31.752730Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:31.752745Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=9; 2025-07-08T11:59:31.752763Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975653653;tx_id=18446744073709551615;;current_snapshot_ts=1751975951490; 2025-07-08T11:59:31.752770Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:31.752778Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:31.752783Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:31.752803Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; 2025-07-08T11:59:31.755245Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:6272:8263];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:248;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-07-08T11:59:31.755863Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:6272:8263];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:237;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-07-08T11:59:31.755876Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-07-08T11:59:31.755880Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-07-08T11:59:31.755886Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:6272:8263];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:31.755909Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:6272:8263];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=9; 2025-07-08T11:59:31.755920Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:6272:8263];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975653653;tx_id=18446744073709551615;;current_snapshot_ts=1751975951490; 2025-07-08T11:59:31.755927Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:6272:8263];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:31.755937Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:6272:8263];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:31.755942Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:6272:8263];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:31.755958Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:6272:8263];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.999000s; 2025-07-08T11:59:31.755966Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:6272:8263];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::AlterTtl [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:59:30.658586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:59:30.658607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:30.658616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:59:30.658621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:59:30.658636Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:59:30.658640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:59:30.658652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:30.658664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:59:30.658732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:30.671442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:30.671459Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:30.674908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:30.674959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:59:30.674999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:59:30.676400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:59:30.676444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:59:30.676563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:30.676723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:59:30.677542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:30.677591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:59:30.677781Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:30.677791Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:30.677808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:59:30.677814Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:30.677820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:59:30.677845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:59:30.679060Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:59:30.696773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:30.696838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 
2025-07-08T11:59:30.696894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:59:30.696933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:59:30.696944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:30.697505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:30.697546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:59:30.697588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:30.697598Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:59:30.697603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:59:30.697608Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:59:30.697995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:30.698008Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:30.698014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:59:30.698377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:30.698390Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:30.698397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:30.698405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:59:30.698951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:59:30.699328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:59:30.699372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:59:30.699565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:30.699601Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:30.699611Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:30.699677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:59:30.699685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:30.699714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:59:30.699727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:59:30.700154Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:30.700163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:30.700205Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:30.700210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:59:30.700221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:30.700228Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:59:30.700240Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:30.700244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:30.700248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:30.700252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:30.700257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:59:30.700263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:30.700268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:59:30.700272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:59:30.700283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:59:30.700290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:59:30.700294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:59:30.700674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, 
cookie: 1 2025-07-08T11:59:30.700691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... cationSubscriber, SendToSchemeshard, txId 106 2025-07-08T11:59:32.218368Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 106, at schemeshard: 72057594046678944 2025-07-08T11:59:32.218374Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 0/1, is published: true 2025-07-08T11:59:32.218379Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 106, at schemeshard: 72057594046678944 2025-07-08T11:59:32.259800Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: PREPARED TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 106 MinStep: 5000006 MaxStep: 18446744073709551615 DomainCoordinators: 72057594046316545 2025-07-08T11:59:32.259833Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409546, partId: 0 2025-07-08T11:59:32.259867Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Status: PREPARED TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 106 MinStep: 5000006 MaxStep: 18446744073709551615 DomainCoordinators: 72057594046316545 2025-07-08T11:59:32.259875Z node 3 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-07-08T11:59:32.259919Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409546, shardIdx: 72057594046678944:1, operationId: 106:0, left await: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:32.259924Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 3 -> 128 2025-07-08T11:59:32.260578Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2025-07-08T11:59:32.260633Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-07-08T11:59:32.260642Z node 3 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TPropose operationId# 106:0 HandleReply ProgressState at tablet: 72057594046678944 2025-07-08T11:59:32.260659Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 106 ready parts: 1/1 2025-07-08T11:59:32.260699Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 106 MinStep: 5000006 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:59:32.261264Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 106:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:106 msg type: 269090816 2025-07-08T11:59:32.261306Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 106, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 106 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 106 at step: 5000007 FAKE_COORDINATOR: Send 
Plan to tablet 72075186233409546 for txId: 106 at step: 5000007 2025-07-08T11:59:32.263104Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=106;fline=tx_controller.cpp:215;event=finished_tx;tx_id=106; 2025-07-08T11:59:32.263181Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:32.263211Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 106 Coordinator: 72057594046316545 AckTo { RawX1: 124 RawX2: 12884904038 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:32.263220Z node 3 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TPropose operationId# 106:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000007 2025-07-08T11:59:32.263420Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 128 -> 129 2025-07-08T11:59:32.263462Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-07-08T11:59:32.263476Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000007 2025-07-08T11:59:32.264555Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:32.264565Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-07-08T11:59:32.264620Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T11:59:32.264648Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:32.264654Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:206:2208], at schemeshard: 72057594046678944, txId: 106, path id: 2 2025-07-08T11:59:32.264660Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:206:2208], at schemeshard: 72057594046678944, txId: 106, path id: 3 2025-07-08T11:59:32.264886Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-07-08T11:59:32.264897Z node 3 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TProposedWaitParts operationId# 106:0 ProgressState at tablet: 72057594046678944 2025-07-08T11:59:32.264908Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TAlterColumnTable TProposedWaitParts operationId# 106:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-07-08T11:59:32.265242Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 106 2025-07-08T11:59:32.265258Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 106 2025-07-08T11:59:32.265263Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 
2025-07-08T11:59:32.265268Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2025-07-08T11:59:32.265275Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-07-08T11:59:32.265532Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 14 PathOwnerId: 72057594046678944, cookie: 106 2025-07-08T11:59:32.265545Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 14 PathOwnerId: 72057594046678944, cookie: 106 2025-07-08T11:59:32.265549Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2025-07-08T11:59:32.265553Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 14 2025-07-08T11:59:32.265557Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-07-08T11:59:32.265569Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 0/1, is published: true 2025-07-08T11:59:32.266305Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-07-08T11:59:32.266847Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-07-08T11:59:32.266972Z node 3 :TX_TIERING ERROR: fline=manager.cpp:158;error=cannot_read_secrets;reason=Can't read access key: No such secret: SId:secret; 2025-07-08T11:59:32.267042Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-07-08T11:59:32.278652Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 106 2025-07-08T11:59:32.278680Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409546, partId: 0 2025-07-08T11:59:32.278749Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 106 FAKE_COORDINATOR: Erasing txId 106 2025-07-08T11:59:32.279377Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2025-07-08T11:59:32.279425Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-07-08T11:59:32.279434Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState 2025-07-08T11:59:32.279454Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-07-08T11:59:32.279459Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-07-08T11:59:32.279465Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-07-08T11:59:32.279468Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 
2025-07-08T11:59:32.279474Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2025-07-08T11:59:32.279490Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:341:2319] message: TxId: 106 2025-07-08T11:59:32.279499Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-07-08T11:59:32.279506Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-07-08T11:59:32.279511Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2025-07-08T11:59:32.279556Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T11:59:32.279999Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-07-08T11:59:32.280011Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [3:544:2514] TestWaitNotification: OK eventTxId 106 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom >> DataShardVolatile::DistributedWrite [GOOD] >> DataShardVolatile::DistributedWriteBrokenLock >> TGRpcCmsTest::RemoveWithAnotherTokenTest [GOOD] >> ReadLoad::ShouldReadIterate >> UpsertLoad::ShouldWriteDataBulkUpsertBatch [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 [GOOD] Test command err: 2025-07-08T11:58:53.254258Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:58:53.257560Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:58:53.257624Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:58:53.258363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:58:53.258425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:53.258457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:53.258473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:53.258485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
2025-07-08T11:58:53.258498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:58:53.258510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:53.258522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:53.258532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:58:53.258544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.258556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:58:53.258574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:58:53.266388Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:58:53.266577Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:58:53.266588Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:58:53.266622Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:53.266663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:58:53.266675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:58:53.266678Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:58:53.266685Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:58:53.266692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:58:53.266697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 
2025-07-08T11:58:53.266699Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:58:53.266711Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:53.266717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:58:53.266722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:58:53.266725Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:58:53.266732Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:58:53.266737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:58:53.266742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:58:53.266744Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:58:53.266750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:58:53.266756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:58:53.266758Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:58:53.266776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:58:53.266782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:58:53.266784Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:58:53.266799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:58:53.266805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:58:53.266807Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:58:53.266816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:58:53.266821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.266823Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.266829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:58:53.266833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:58:53.266838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:58:53.266840Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:58:53.266873Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=9; 2025-07-08T11:58:53.266882Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=6; 2025-07-08T11:58:53.266889Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2025-07-08T11:58:53.266897Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=6; 2025-07-08T11:58:53.266905Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:58:53.266914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:58:53.266919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:58:53.266923Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:58:53.266932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:58:53.266936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;eve ... 
=58842;count=215;size_of_portion=184; 2025-07-08T11:59:32.983972Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:48;memory_size=110;data_size=70;sum=22832;count=431; 2025-07-08T11:59:32.983978Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:65;memory_size=206;data_size=182;sum=43568;count=432;size_of_meta=112; 2025-07-08T11:59:32.983983Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=254;sum=59120;count=216;size_of_portion=184; 2025-07-08T11:59:32.983999Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=1524; 2025-07-08T11:59:32.984006Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=2; 2025-07-08T11:59:32.984106Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=93; 2025-07-08T11:59:32.984111Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=1654; 2025-07-08T11:59:32.984115Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=1666; 2025-07-08T11:59:32.984120Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=1; 2025-07-08T11:59:32.984144Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=19; 2025-07-08T11:59:32.984148Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=1744; 2025-07-08T11:59:32.984167Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=13; 2025-07-08T11:59:32.984182Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=10; 2025-07-08T11:59:32.984204Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=18; 2025-07-08T11:59:32.984219Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=10; 2025-07-08T11:59:32.985140Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=910; 2025-07-08T11:59:32.986089Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=928; 2025-07-08T11:59:32.986105Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=2; 2025-07-08T11:59:32.986111Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=2; 2025-07-08T11:59:32.986116Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-07-08T11:59:32.986129Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=9; 2025-07-08T11:59:32.986134Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-07-08T11:59:32.986148Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=9; 2025-07-08T11:59:32.986154Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=0; 2025-07-08T11:59:32.986167Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=6; 2025-07-08T11:59:32.986178Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=6; 2025-07-08T11:59:32.986193Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=11; 2025-07-08T11:59:32.986197Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=4726; 2025-07-08T11:59:32.986237Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted 
{blob_bytes=108238352;raw_bytes=183045560;count=15;records=1915000} inactive {blob_bytes=205426288;raw_bytes=316809958;count=39;records=3635000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-07-08T11:59:32.986262Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4070:6045];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SwitchToWork; 2025-07-08T11:59:32.986270Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4070:6045];process=SwitchToWork;fline=columnshard.cpp:76;event=initialize_shard;step=SignalTabletActive; 2025-07-08T11:59:32.986282Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];process=SwitchToWork;fline=columnshard_impl.cpp:1327;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-07-08T11:59:32.986290Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];process=SwitchToWork;fline=column_engine_logs.cpp:471;event=OnTieringModified;new_count_tierings=0; 2025-07-08T11:59:32.986319Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:32.986334Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=23; 2025-07-08T11:59:32.986349Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975637386;tx_id=18446744073709551615;;current_snapshot_ts=1751975934314; 2025-07-08T11:59:32.986358Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=23;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:32.986368Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:32.986372Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:32.986393Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; 2025-07-08T11:59:32.988981Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:248;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-07-08T11:59:32.989089Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:237;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-07-08T11:59:32.989095Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-07-08T11:59:32.989098Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-07-08T11:59:32.989106Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:32.989137Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=23; 2025-07-08T11:59:32.989154Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975637386;tx_id=18446744073709551615;;current_snapshot_ts=1751975934314; 2025-07-08T11:59:32.989165Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=23;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:32.989177Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:32.989183Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:32.989205Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.999000s; 2025-07-08T11:59:32.989216Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; >> UpsertLoad::ShouldWriteDataBulkUpsert >> ReadLoad::ShouldReadKqp ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/cms/ut/unittest >> TGRpcCmsTest::RemoveWithAnotherTokenTest [GOOD] Test command err: 2025-07-08T11:59:31.964090Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679498861680333:2192];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:59:31.964105Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0022ac/r3tmp/tmpcaE7v7/pdisk_1.dat 2025-07-08T11:59:32.143964Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9991, node 1 2025-07-08T11:59:32.205097Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:59:32.205111Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:59:32.205113Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:59:32.205168Z node 1 :NET_CLASSIFIER ERROR: got bad 
distributable configuration TClient is connected to server localhost:6317 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-07-08T11:59:32.281011Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:32.281052Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:32.285484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:59:32.289648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:6317 2025-07-08T11:59:32.337656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T11:59:32.361741Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7524679503156648216:2281], Recipient [1:7524679503156647908:2194]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" PeerName: "ipv6:[::1]:50528" } 2025-07-08T11:59:32.361765Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-07-08T11:59:32.361772Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-07-08T11:59:32.361775Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-07-08T11:59:32.361802Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" PeerName: "ipv6:[::1]:50528" 2025-07-08T11:59:32.361843Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1751975972361072) 2025-07-08T11:59:32.361940Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1751975972361072 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 
areResourcesShared=0 sharedDomainId= 2025-07-08T11:59:32.362010Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-07-08T11:59:32.365884Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2025-07-08T11:59:32.366118Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1751975972361072&action=1" } } } 2025-07-08T11:59:32.366161Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-07-08T11:59:32.366186Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-07-08T11:59:32.366222Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-07-08T11:59:32.366365Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-07-08T11:59:32.366391Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-07-08T11:59:32.369970Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-07-08T11:59:32.369987Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-07-08T11:59:32.370002Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7524679503156648221:2194], Recipient [1:7524679503156647908:2194]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-07-08T11:59:32.370005Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-07-08T11:59:32.370009Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-07-08T11:59:32.370011Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-07-08T11:59:32.370021Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-07-08T11:59:32.370026Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-07-08T11:59:32.370042Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-07-08T11:59:32.373777Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-07-08T11:59:32.373797Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-07-08T11:59:32.373799Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-07-08T11:59:32.373801Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-07-08T11:59:32.373828Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-07-08T11:59:32.373837Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1751975972361072 
errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-07-08T11:59:32.374126Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7524679503156648265:2283], Recipient [1:7524679503156647908:2194]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1751975972361072&action=1" } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" } 2025-07-08T11:59:32.374138Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-07-08T11:59:32.374183Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1751975972361072&action=1" } } 2025-07-08T11:59:32.376195Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-07-08T11:59:32.376237Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-07-08T11:59:32.376250Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1)::Bootstrap 2025-07-08T11:59:32.376253Z node 1 :CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) create subdomain 2025-07-08T11:59:32.377066Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" DatabaseName: "Root" 2025-07-08T11:59:32.377458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:1, at schemeshard: 72057594046644480 2025-07-08T11:59:32.378223Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-07-08T11:59:32.378231Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976715659 2025-07-08T11:59:32.379286Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976715659 2025-07-08T11:59:32.381259Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715659 2025-07-08T11:59:32.381453Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1751975972425 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "user-1@builtin" ACL: "" EffectiveACL: "\n\032\010\001\020\377\377\003\032\016user-1@builtin \003(\001" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { 
SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } D ... hemeShard: 72057594046644480 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046644480 2025-07-08T11:59:32.823471Z node 1 :CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) drop subdomain 2025-07-08T11:59:32.823508Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain drop cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root/users" OperationType: ESchemeOpForceDropExtSubDomain Drop { Name: "user-1" } } } ExecTimeoutPeriod: 18446744073709551615 UserToken: "\n\016user-2@builtin\022\030\022\026\n\024all-users@well-known\032\016user-2@builtin\"\007Builtin*\017**** (FA717EBF)" DatabaseName: "Root" 2025-07-08T11:59:32.823735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Dropping path id [OwnerId: 72057594046644480, LocalPathId: 3] by tx: 281474976715663 2025-07-08T11:59:32.823828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpForceDropExtSubDomain, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:59:32.828492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Dropping path id [OwnerId: 72057594046644480, LocalPathId: 3] by tx: 281474976715663 2025-07-08T11:59:32.828668Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976715663 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 2025-07-08T11:59:32.828677Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976715663 2025-07-08T11:59:32.829171Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7524679503156649063:2410], Recipient [1:7524679503156647908:2194]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1751975972809193&action=2" } UserToken: "" } 2025-07-08T11:59:32.829177Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-07-08T11:59:32.829206Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1751975972809193&action=2" } } 2025-07-08T11:59:32.829693Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976715663 2025-07-08T11:59:32.839815Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715663 2025-07-08T11:59:32.839825Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-07-08T11:59:32.839836Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-07-08T11:59:32.839853Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435077, Sender [1:7524679503156649043:2194], Recipient [1:7524679503156647908:2194]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-07-08T11:59:32.839856Z node 1 :CMS_TENANTS TRACE: StateWork, processing event 
TEvPrivate::TEvSubdomainRemoved 2025-07-08T11:59:32.839862Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-07-08T11:59:32.839865Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-07-08T11:59:32.839873Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Execute /Root/users/user-1 2025-07-08T11:59:32.839882Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1751975972809193 errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2025-07-08T11:59:32.839899Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1751975972809193 issue=AccessDenied: Access denied for request 2025-07-08T11:59:32.840668Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Complete /Root/users/user-1 2025-07-08T11:59:32.840703Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2025-07-08T11:59:32.840706Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-07-08T11:59:32.840749Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7524679503156647787:2193], Recipient [1:7524679503156647908:2194]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-07-08T11:59:32.840751Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-07-08T11:59:32.840756Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-07-08T11:59:32.840758Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-07-08T11:59:32.840763Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2025-07-08T11:59:32.840768Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1751975972809193 errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2025-07-08T11:59:32.840249Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found - using supplied 72075186224037888 2025-07-08T11:59:32.844610Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-07-08T11:59:32.844626Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-07-08T11:59:32.844635Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-07-08T11:59:32.844671Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-07-08T11:59:32.844836Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 2 } } Success: true ConfigTxSeqNo: 10 2025-07-08T11:59:32.844846Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 2 } } } 2025-07-08T11:59:32.846087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:1 2025-07-08T11:59:32.850454Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { 
Success: true } Success: true ConfigTxSeqNo: 11 2025-07-08T11:59:32.850485Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435081, Sender [1:7524679503156649104:2194], Recipient [1:7524679503156647908:2194]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2025-07-08T11:59:32.850503Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolDeleted 2025-07-08T11:59:32.850507Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-07-08T11:59:32.850509Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-07-08T11:59:32.850518Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2025-07-08T11:59:32.850525Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2025-07-08T11:59:32.849130Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2025-07-08T11:59:32.849146Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037896 not found 2025-07-08T11:59:32.849149Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037897 not found 2025-07-08T11:59:32.849151Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-07-08T11:59:32.849153Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2025-07-08T11:59:32.849156Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037894 not found 2025-07-08T11:59:32.849158Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-07-08T11:59:32.849160Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2025-07-08T11:59:32.849162Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037895 not found 2025-07-08T11:59:32.853058Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037888 2025-07-08T11:59:32.854265Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-07-08T11:59:32.854275Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-07-08T11:59:32.854277Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-07-08T11:59:32.854278Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-07-08T11:59:32.854291Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1751975972809193 2025-07-08T11:59:32.854295Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1751975972809193 issue=AccessDenied: Access denied for request 2025-07-08T11:59:32.854297Z node 1 :CMS_TENANTS TRACE: Remove tenant /Root/users/user-1 from database txid=1751975972809193 issue=AccessDenied: Access denied for request 2025-07-08T11:59:32.854299Z node 1 :CMS_TENANTS TRACE: Remove pool /Root/users/user-1:hdd from database 2025-07-08T11:59:32.854316Z node 1 :CMS_TENANTS TRACE: Add tenant removal info for /Root/users/user-1 txid=1751975972809193 code=SUCCESS errorcode=UNAUTHORIZED issue=AccessDenied: 
Access denied for request 2025-07-08T11:59:32.859381Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-07-08T11:59:32.859875Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone Complete 2025-07-08T11:59:32.859896Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-07-08T11:59:32.894656Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7524679503156649163:2416], Recipient [1:7524679503156647908:2194]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1751975972809193&action=2" } UserToken: "" } 2025-07-08T11:59:32.894670Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-07-08T11:59:32.894730Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1751975972809193&action=2" ready: true status: SUCCESS } } 2025-07-08T11:59:32.904752Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-07-08T11:59:32.904819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-07-08T11:59:32.961812Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] >> UpsertLoad::ShouldCreateTable >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 >> TNetClassifierTest::TestInitFromFile >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom [GOOD] >> ReadLoad::ShouldReadIterate [GOOD] >> ReadLoad::ShouldReadIterateMoreThanRows ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] Test command err: 2025-07-08T11:59:33.993382Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000c3b/r3tmp/tmpmtjtrn/pdisk_1.dat 2025-07-08T11:59:34.113921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T11:59:34.132916Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:34.169217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:34.169259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:34.179755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:59:34.256919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:59:34.498411Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } UpsertKqpStart { RowCount: 20 
Inflight: 5 KeyFrom: 12345 } 2025-07-08T11:59:34.498455Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:675:2569], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 KeyFrom: 12345 2025-07-08T11:59:34.498900Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:675:2569], subTag: 2} started# 5 actors each with inflight# 4 2025-07-08T11:59:34.498909Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-07-08T11:59:34.498918Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-07-08T11:59:34.498923Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-07-08T11:59:34.498928Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-07-08T11:59:34.498933Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-07-08T11:59:34.499671Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 1} session: ydb://session/3?node_id=1&id=MzA0ZDk3N2UtNWU4ODM2NGUtY2IyMmFlMTQtOWY4YzUzZGQ= 2025-07-08T11:59:34.500071Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 2} session: ydb://session/3?node_id=1&id=Yzg2NzcxMmItOTFmNTczYjctODczZWUyZTQtNmRhZDJiMTk= 2025-07-08T11:59:34.500285Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 3} session: ydb://session/3?node_id=1&id=MTI0NTVmZTgtNDQ4ZTMxMzYtZTc0ZTkzNS1mYTg4Y2Y3NQ== 2025-07-08T11:59:34.500489Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 4} session: ydb://session/3?node_id=1&id=MmQ5NWRlNjctNjZkZDk4Y2UtYmE1MDExN2QtNjhiYWE0NTY= 2025-07-08T11:59:34.500678Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 5} session: ydb://session/3?node_id=1&id=MWNkY2Q0ZmEtZmQ3N2QzMGYtYTk0YThiNjItZjUxODJhMjE= 2025-07-08T11:59:34.641724Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 1} finished in 1751975974.641709s, errors=0 2025-07-08T11:59:34.641757Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 2} finished in 1751975974.641755s, errors=0 2025-07-08T11:59:34.641872Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:675:2569], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1751975974641 OperationsOK: 4 OperationsError: 0 } 2025-07-08T11:59:34.641880Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 3} finished in 1751975974.641878s, errors=0 2025-07-08T11:59:34.641904Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:675:2569], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1751975974641 OperationsOK: 4 OperationsError: 0 } 2025-07-08T11:59:34.641909Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 4} finished in 1751975974.641907s, errors=0 2025-07-08T11:59:34.641919Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:675:2569], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1751975974641 OperationsOK: 4 OperationsError: 0 } 2025-07-08T11:59:34.641935Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:675:2569], subTag: 2} 
finished: 4 { Tag: 4 DurationMs: 1751975974641 OperationsOK: 4 OperationsError: 0 } 2025-07-08T11:59:34.641941Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 5} finished in 1751975974.641939s, errors=0 2025-07-08T11:59:34.641953Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:675:2569], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1751975974641 OperationsOK: 4 OperationsError: 0 } 2025-07-08T11:59:34.641959Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:675:2569], subTag: 2} finished in 0.143090s, oks# 20, errors# 0 2025-07-08T11:59:34.641975Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:676:2570] with tag# 2 >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsert [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsert2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom [GOOD] Test command err: 2025-07-08T11:59:34.363046Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000c06/r3tmp/tmpJnhBKy/pdisk_1.dat 2025-07-08T11:59:34.525977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T11:59:34.544897Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:34.581606Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:34.581646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:34.592196Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:59:34.686996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:59:34.948042Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 KeyFrom: 12345 } 2025-07-08T11:59:34.948079Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:675:2569], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 KeyFrom: 12345 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-07-08T11:59:35.036243Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:675:2569], subTag: 2} TUpsertActor finished in 0.088099s, errors=0 2025-07-08T11:59:35.036279Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:676:2570] with tag# 2 >> TNetClassifierTest::TestInitFromBadlyFormattedFile >> DataShardVolatile::DistributedWriteBrokenLock [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan+UseSink >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom [GOOD] Test command err: 2025-07-08T11:59:33.550220Z node 1 
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000ca8/r3tmp/tmpfSmqjN/pdisk_1.dat 2025-07-08T11:59:33.682760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T11:59:33.703122Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:33.740404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:33.740443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:33.755682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:59:33.854494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:59:34.097794Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertBulkStart { RowCount: 100 Inflight: 3 BatchSize: 7 } 2025-07-08T11:59:34.097836Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:675:2569], subTag: 2} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 3 BatchSize: 7 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-07-08T11:59:34.186744Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:675:2569], subTag: 2} TUpsertActor finished in 0.088837s, errors=0 2025-07-08T11:59:34.186778Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:676:2570] with tag# 2 2025-07-08T11:59:34.800530Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000ca8/r3tmp/tmpwIRhjQ/pdisk_1.dat 2025-07-08T11:59:34.950134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T11:59:34.970385Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:35.005531Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:35.005573Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:35.017436Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:59:35.099553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:59:35.320437Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertBulkStart { RowCount: 10 Inflight: 3 KeyFrom: 12345 } 
2025-07-08T11:59:35.320472Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:675:2569], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 KeyFrom: 12345 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-07-08T11:59:35.392974Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:675:2569], subTag: 2} TUpsertActor finished in 0.072441s, errors=0 2025-07-08T11:59:35.393014Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:676:2570] with tag# 2 |65.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/address_classification/ut/unittest >> TxUsage::WriteToTopic_Demo_16 [GOOD] >> TNetClassifierTest::TestInitFromFile [GOOD] >> ReadLoad::ShouldReadIterateMoreThanRows [GOOD] >> ReadLoad::ShouldReadKqp [GOOD] >> ReadLoad::ShouldReadKqpMoreThanRows >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD] >> TNetClassifierTest::TestInitFromBadlyFormattedFile [GOOD] >> TxUsage::WriteToTopic_Demo_17 >> UpsertLoad::ShouldCreateTable [GOOD] >> UpsertLoad::ShouldDropCreateTable >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromFile [GOOD] Test command err: 2025-07-08T11:59:35.251560Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679518811467816:2245];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00115a/r3tmp/tmp0FwEKi/pdisk_1.dat 2025-07-08T11:59:35.309136Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T11:59:35.389063Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:35.405152Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/43nv/00115a/r3tmp/yandexCodGCr.tmp 2025-07-08T11:59:35.405166Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/43nv/00115a/r3tmp/yandexCodGCr.tmp 2025-07-08T11:59:35.405349Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:35.405364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:35.409206Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/43nv/00115a/r3tmp/yandexCodGCr.tmp 2025-07-08T11:59:35.409286Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T11:59:35.409643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:59:36.237056Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> ReadLoad::ShouldReadIterateMoreThanRows [GOOD] Test command err: 2025-07-08T11:59:34.500409Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000bcb/r3tmp/tmptqWRt9/pdisk_1.dat 
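
The DS_LOAD_TEST actors above summarize each run as "TUpsertActor finished in <seconds>s, errors=<n>". Below is a minimal sketch for pulling those summaries out of a raw log dump like this one, assuming only that wording; the script and its names are illustrative and are not part of ya or YDB. Note that a few actors in this run appear to print an absolute epoch timestamp instead of a delta (e.g. "finished in 1751975974.641709s" next to the group total of 0.143090s), so the reported maximum should be read with that in mind.

#!/usr/bin/env python3
# Minimal sketch: summarize DS_LOAD_TEST "finished in ...s, errors=..." entries
# from a ya test log fed on stdin. Only the wording visible above is assumed.
import re
import sys

FINISHED = re.compile(r"finished in ([0-9.]+)s, errors=(\d+)")

def summarize(text):
    durations, errors = [], 0
    for secs, errs in FINISHED.findall(text):
        durations.append(float(secs))
        errors += int(errs)
    return durations, errors

if __name__ == "__main__":
    durations, errors = summarize(sys.stdin.read())
    if durations:
        print(f"actors finished: {len(durations)}, total errors: {errors}, "
              f"slowest: {max(durations):.3f}s")
    else:
        print("no 'finished in ...s, errors=...' entries found")
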
2025-07-08T11:59:34.638229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T11:59:34.657614Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:34.693397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:34.693433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:34.705142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:59:34.789874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:59:35.046260Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# usertable in dir# /Root with rows# 1000 2025-07-08T11:59:35.046534Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:675:2569], subTag: 1} TUpsertActor Bootstrap called: RowCount: 1000 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" 2025-07-08T11:59:35.082775Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:675:2569], subTag: 1} TUpsertActor finished in 0.036195s, errors=0 2025-07-08T11:59:35.083031Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kReadIteratorStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } ReadIteratorStart { RowCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 } 2025-07-08T11:59:35.083051Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# [1:684:2578] with id# {Tag: 0, parent: [1:675:2569], subTag: 3} Bootstrap called: RowCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 2025-07-08T11:59:35.083344Z node 1 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [1:675:2569], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 2 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2025-07-08T11:59:35.083370Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:687:2581] 2025-07-08T11:59:35.083380Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:684:2578], subTag: 1} Bootstrap called, sample# 0 2025-07-08T11:59:35.083385Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:684:2578], subTag: 1} Connect to# 72075186224037888 called 2025-07-08T11:59:35.083565Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:684:2578], subTag: 1} Handle TEvClientConnected called, Status# OK 2025-07-08T11:59:35.085758Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:684:2578], subTag: 1} finished in 0.002180s, read# 1000 2025-07-08T11:59:35.085803Z node 1 :DS_LOAD_TEST NOTICE: fullscan actor# [1:687:2581] with chunkSize# 0 finished: 0 { DurationMs: 2 OperationsOK: 1000 OperationsError: 0 } 2025-07-08T11:59:35.085824Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:690:2584] 2025-07-08T11:59:35.085830Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:684:2578], subTag: 2} Bootstrap called, sample# 0 2025-07-08T11:59:35.085834Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, 
parent: [1:684:2578], subTag: 2} Connect to# 72075186224037888 called 2025-07-08T11:59:35.085882Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:684:2578], subTag: 2} Handle TEvClientConnected called, Status# OK 2025-07-08T11:59:35.108720Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:684:2578], subTag: 2} finished in 0.022822s, read# 1000 2025-07-08T11:59:35.108769Z node 1 :DS_LOAD_TEST NOTICE: fullscan actor# [1:690:2584] with chunkSize# 1 finished: 0 { DurationMs: 22 OperationsOK: 1000 OperationsError: 0 } 2025-07-08T11:59:35.108795Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:693:2587] 2025-07-08T11:59:35.108803Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:684:2578], subTag: 3} Bootstrap called, sample# 0 2025-07-08T11:59:35.108807Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:684:2578], subTag: 3} Connect to# 72075186224037888 called 2025-07-08T11:59:35.108867Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:684:2578], subTag: 3} Handle TEvClientConnected called, Status# OK 2025-07-08T11:59:35.116177Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:684:2578], subTag: 3} finished in 0.007296s, read# 1000 2025-07-08T11:59:35.116231Z node 1 :DS_LOAD_TEST NOTICE: fullscan actor# [1:693:2587] with chunkSize# 10 finished: 0 { DurationMs: 7 OperationsOK: 1000 OperationsError: 0 } 2025-07-08T11:59:35.116260Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:696:2590] 2025-07-08T11:59:35.116267Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:684:2578], subTag: 4} Bootstrap called, sample# 1000 2025-07-08T11:59:35.116272Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:684:2578], subTag: 4} Connect to# 72075186224037888 called 2025-07-08T11:59:35.116331Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:684:2578], subTag: 4} Handle TEvClientConnected called, Status# OK 2025-07-08T11:59:35.116883Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:684:2578], subTag: 4} finished in 0.000414s, sampled# 1000, iter finished# 1, oks# 1000 2025-07-08T11:59:35.116899Z node 1 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [1:675:2569], subTag: 3} received keyCount# 1000 2025-07-08T11:59:35.116937Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [1:675:2569], subTag: 3} started read actor with id# [1:699:2593] 2025-07-08T11:59:35.116943Z node 1 :DS_LOAD_TEST NOTICE: TReadIteratorPoints# {Tag: 0, parent: [1:684:2578], subTag: 5} Bootstrap called, will read keys# 1000 2025-07-08T11:59:35.138363Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [1:675:2569], subTag: 3} received point times# 1000, Inflight left# 0 2025-07-08T11:59:35.138433Z node 1 :DS_LOAD_TEST INFO: headread with inflight# 1 finished: 0 { DurationMs: 21 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 1\n" } 2025-07-08T11:59:35.138473Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# {Tag: 0, parent: [1:675:2569], subTag: 3} finished in 0.055384s with report: { DurationMs: 2 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 1, type# FullScan with chunk# inf" } { DurationMs: 22 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 2, type# FullScan with chunk# 1" } { DurationMs: 7 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 3, type# FullScan with chunk# 10" } { DurationMs: 21 
OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 1\n" PrefixInfo: "Test run# 4, type# ReadHeadPoints with inflight# 1" } 2025-07-08T11:59:35.138551Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:684:2578] with tag# 3 2025-07-08T11:59:35.834027Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000bcb/r3tmp/tmp46wkA9/pdisk_1.dat 2025-07-08T11:59:35.949267Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T11:59:35.963833Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:35.999793Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:35.999829Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:36.013519Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:59:36.098513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:59:36.333780Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# usertable in dir# /Root with rows# 10 2025-07-08T11:59:36.333859Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:675:2569], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" 2025-07-08T11:59:36.360511Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:675:2569], subTag: 1} TUpsertActor finished in 0.026593s, errors=0 2025-07-08T11:59:36.360649Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kReadIteratorStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } ReadIteratorStart { RowCount: 10 ReadCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 } 2025-07-08T11:59:36.360667Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# [2:684:2578] with id# {Tag: 0, parent: [2:675:2569], subTag: 3} Bootstrap called: RowCount: 10 ReadCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 2025-07-08T11:59:36.361421Z node 2 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [2:675:2569], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 2 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2025-07-08T11:59:36.361448Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:687:2581] 2025-07-08T11:59:36.361462Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:684:2578], subTag: 1} Bootstrap called, sample# 0 2025-07-08T11:59:36.361467Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:684:2578], subTag: 1} Connect to# 72075186224037888 called 2025-07-08T11:59:36.361522Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:684:2578], subTag: 1} Handle TEvClientConnected called, Status# 
OK 2025-07-08T11:59:36.361688Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:684:2578], subTag: 1} finished in 0.000160s, read# 10 2025-07-08T11:59:36.361711Z node 2 :DS_LOAD_TEST NOTICE: fullscan actor# [2:687:2581] with chunkSize# 0 finished: 0 { DurationMs: 0 OperationsOK: 10 OperationsError: 0 } 2025-07-08T11:59:36.361722Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:690:2584] 2025-07-08T11:59:36.361726Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:684:2578], subTag: 2} Bootstrap called, sample# 0 2025-07-08T11:59:36.361730Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:684:2578], subTag: 2} Connect to# 72075186224037888 called 2025-07-08T11:59:36.361757Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:684:2578], subTag: 2} Handle TEvClientConnected called, Status# OK 2025-07-08T11:59:36.361931Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:684:2578], subTag: 2} finished in 0.000170s, read# 10 2025-07-08T11:59:36.361943Z node 2 :DS_LOAD_TEST NOTICE: fullscan actor# [2:690:2584] with chunkSize# 1 finished: 0 { DurationMs: 0 OperationsOK: 10 OperationsError: 0 } 2025-07-08T11:59:36.361951Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:693:2587] 2025-07-08T11:59:36.361955Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:684:2578], subTag: 3} Bootstrap called, sample# 0 2025-07-08T11:59:36.361958Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:684:2578], subTag: 3} Connect to# 72075186224037888 called 2025-07-08T11:59:36.361987Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:684:2578], subTag: 3} Handle TEvClientConnected called, Status# OK 2025-07-08T11:59:36.362036Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:684:2578], subTag: 3} finished in 0.000046s, read# 10 2025-07-08T11:59:36.362046Z node 2 :DS_LOAD_TEST NOTICE: fullscan actor# [2:693:2587] with chunkSize# 10 finished: 0 { DurationMs: 0 OperationsOK: 10 OperationsError: 0 } 2025-07-08T11:59:36.362056Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:696:2590] 2025-07-08T11:59:36.362061Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:684:2578], subTag: 4} Bootstrap called, sample# 10 2025-07-08T11:59:36.362064Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:684:2578], subTag: 4} Connect to# 72075186224037888 called 2025-07-08T11:59:36.362091Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:684:2578], subTag: 4} Handle TEvClientConnected called, Status# OK 2025-07-08T11:59:36.362126Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:684:2578], subTag: 4} finished in 0.000027s, sampled# 10, iter finished# 1, oks# 10 2025-07-08T11:59:36.362136Z node 2 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [2:675:2569], subTag: 3} received keyCount# 10 2025-07-08T11:59:36.362166Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [2:675:2569], subTag: 3} started read actor with id# [2:699:2593] 2025-07-08T11:59:36.362171Z node 2 :DS_LOAD_TEST NOTICE: TReadIteratorPoints# {Tag: 0, parent: [2:684:2578], subTag: 5} Bootstrap called, will read keys# 10 2025-07-08T11:59:36.383460Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [2:675:2569], subTag: 3} received point times# 1000, Inflight left# 0 2025-07-08T11:59:36.383537Z node 2 :DS_LOAD_TEST INFO: headread with inflight# 1 finished: 0 { DurationMs: 21 OperationsOK: 1000 
OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 1\n" } 2025-07-08T11:59:36.383563Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# {Tag: 0, parent: [2:675:2569], subTag: 3} finished in 0.022874s with report: { DurationMs: 0 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 1, type# FullScan with chunk# inf" } { DurationMs: 0 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 2, type# FullScan with chunk# 1" } { DurationMs: 0 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 3, type# FullScan with chunk# 10" } { DurationMs: 21 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 1\n" PrefixInfo: "Test run# 4, type# ReadHeadPoints with inflight# 1" } 2025-07-08T11:59:36.383583Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:684:2578] with tag# 3 |65.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/pytest >> test.py::test[aggregate-list_after_group-default.txt-Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromBadlyFormattedFile [GOOD] Test command err: 2025-07-08T11:59:36.237144Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679522618819468:2235];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001107/r3tmp/tmpIWTybY/pdisk_1.dat 2025-07-08T11:59:36.326599Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T11:59:36.395651Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:36.433642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:36.433668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:36.433972Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:59:36.438949Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/43nv/001107/r3tmp/yandexHTRHsg.tmp 2025-07-08T11:59:36.438961Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/43nv/001107/r3tmp/yandexHTRHsg.tmp 2025-07-08T11:59:36.445009Z node 1 :NET_CLASSIFIER ERROR: invalid NetData format 2025-07-08T11:59:36.445033Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: /home/runner/.ya/build/build_root/43nv/001107/r3tmp/yandexHTRHsg.tmp 2025-07-08T11:59:36.445099Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD] Test command err: 2025-07-08T11:59:34.118982Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000bd3/r3tmp/tmpz944D4/pdisk_1.dat 2025-07-08T11:59:34.246064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T11:59:34.264941Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:34.305503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:34.305541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:34.317445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:59:34.401271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:59:34.635302Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 } 2025-07-08T11:59:34.635344Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:675:2569], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-07-08T11:59:34.741216Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:675:2569], subTag: 2} TUpsertActor finished in 0.105804s, errors=0 2025-07-08T11:59:34.741251Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:676:2570] with tag# 2 2025-07-08T11:59:35.707495Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000bd3/r3tmp/tmpjHJpNy/pdisk_1.dat 2025-07-08T11:59:35.849529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T11:59:35.870164Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:35.909511Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:35.909551Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:35.921409Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:59:36.002919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:59:36.232162Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 } 2025-07-08T11:59:36.232195Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:675:2569], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" 2025-07-08T11:59:36.305256Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:675:2569], subTag: 2} TUpsertActor finished in 0.073000s, errors=0 2025-07-08T11:59:36.305297Z node 2 :DS_LOAD_TEST INFO: 
TLoad# 0 received finished from actor# [2:676:2570] with tag# 2 >> UpsertLoad::ShouldWriteDataBulkUpsert2 [GOOD] |65.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot [GOOD] Test command err: 2025-07-08T11:59:01.248314Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:59:01.252190Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:59:01.252234Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:59:01.252944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:59:01.253021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:59:01.253056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:59:01.253080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:59:01.253099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:59:01.253120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:59:01.253140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:59:01.253158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:59:01.253174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:59:01.253192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:59:01.253211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:59:01.253229Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:59:01.258443Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:59:01.258637Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:59:01.258649Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:59:01.258675Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:01.258714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:59:01.258725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:59:01.258733Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:59:01.258743Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:59:01.258751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:59:01.258758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:59:01.258763Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:59:01.258787Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:01.258795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:59:01.258801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:59:01.258806Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:59:01.258815Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:59:01.258822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:59:01.258829Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:59:01.258833Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:59:01.258841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:59:01.258848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:59:01.258852Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:59:01.258875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:59:01.258882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:59:01.258886Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:59:01.258906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:59:01.258915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:59:01.258920Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:59:01.258933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:59:01.258940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:59:01.258944Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:59:01.258953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:59:01.258961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:59:01.258969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:59:01.258974Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:59:01.259008Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=8; 2025-07-08T11:59:01.259017Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=5; 2025-07-08T11:59:01.259026Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=5; 2025-07-08T11:59:01.259036Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=6; 2025-07-08T11:59:01.259046Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:59:01.259057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:59:01.259064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:59:01.259069Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:59:01.259094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:59:01.259100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;eve ... 
on=184; 2025-07-08T11:59:35.608254Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:48;memory_size=110;data_size=60;sum=780800;count=14327; 2025-07-08T11:59:35.608260Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:65;memory_size=206;data_size=172;sum=1468544;count=14328;size_of_meta=112; 2025-07-08T11:59:35.608265Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=244;sum=1984352;count=7164;size_of_portion=184; 2025-07-08T11:59:35.608326Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=18923; 2025-07-08T11:59:35.608338Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=2; 2025-07-08T11:59:35.608506Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=160; 2025-07-08T11:59:35.608512Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=19128; 2025-07-08T11:59:35.608516Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=19139; 2025-07-08T11:59:35.608524Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=2; 2025-07-08T11:59:35.608594Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=65; 2025-07-08T11:59:35.608599Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=19275; 2025-07-08T11:59:35.608634Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=27; 2025-07-08T11:59:35.608651Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=12; 2025-07-08T11:59:35.608708Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=52; 2025-07-08T11:59:35.608744Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=31; 2025-07-08T11:59:35.619322Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=10552; 2025-07-08T11:59:35.636593Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=17219; 2025-07-08T11:59:35.636634Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=4; 2025-07-08T11:59:35.636643Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=2; 2025-07-08T11:59:35.636649Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-07-08T11:59:35.636668Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=15; 2025-07-08T11:59:35.636675Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-07-08T11:59:35.636692Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=12; 2025-07-08T11:59:35.636698Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-07-08T11:59:35.636711Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=7; 2025-07-08T11:59:35.636728Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=12; 2025-07-08T11:59:35.636746Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=13; 2025-07-08T11:59:35.636752Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=48477; 2025-07-08T11:59:35.636797Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted 
{blob_bytes=21623968;raw_bytes=21227350;count=3;records=225200} inactive {blob_bytes=141321168;raw_bytes=137674250;count=221;records=1575200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-07-08T11:59:35.636835Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:9671:11273];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SwitchToWork; 2025-07-08T11:59:35.636844Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:9671:11273];process=SwitchToWork;fline=columnshard.cpp:76;event=initialize_shard;step=SignalTabletActive; 2025-07-08T11:59:35.636862Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];process=SwitchToWork;fline=columnshard_impl.cpp:1327;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-07-08T11:59:35.636870Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];process=SwitchToWork;fline=column_engine_logs.cpp:471;event=OnTieringModified;new_count_tierings=0; 2025-07-08T11:59:35.636908Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:35.636926Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=9; 2025-07-08T11:59:35.636941Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975646658;tx_id=18446744073709551615;;current_snapshot_ts=1751975943072; 2025-07-08T11:59:35.636963Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:35.636975Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:35.636981Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:35.637007Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; 2025-07-08T11:59:35.638497Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:248;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-07-08T11:59:35.638602Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:237;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-07-08T11:59:35.638606Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
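
The restart trace above breaks tablet 9437184's initialization into PRECHARGE:/EXECUTE: stage counters such as EXECUTE:granulesLoadingTime=19139 and EXECUTE:storages_managerLoadingTime=17219. The sketch below, assuming only that key=value form, ranks which stages dominate in a log excerpt like this one; the script is illustrative, not part of the YDB tooling, and the counter units are taken as-is from the log.

#!/usr/bin/env python3
# Minimal sketch: rank the PRECHARGE:/EXECUTE: *LoadingTime counters printed by
# the columnshard init trace, reading a log excerpt from stdin.
import re
import sys
from collections import defaultdict

LOADING = re.compile(r"((?:PRECHARGE|EXECUTE):[A-Za-z_/]+)LoadingTime=(\d+)")

def rank(text):
    totals = defaultdict(int)
    for stage, value in LOADING.findall(text):
        totals[stage] += int(value)
    return sorted(totals.items(), key=lambda kv: kv[1], reverse=True)

if __name__ == "__main__":
    for stage, total in rank(sys.stdin.read())[:10]:
        print(f"{stage:45s} {total}")
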
2025-07-08T11:59:35.638610Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-07-08T11:59:35.638616Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:35.638633Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=9; 2025-07-08T11:59:35.638642Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975646658;tx_id=18446744073709551615;;current_snapshot_ts=1751975943072; 2025-07-08T11:59:35.638649Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:35.638657Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:35.638662Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:35.638676Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=1.000000s; 2025-07-08T11:59:35.638683Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot [GOOD] |65.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest |65.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest >> DataShardVolatile::DistributedWriteShardRestartBeforePlan+UseSink [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan-UseSink >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsert2 [GOOD] Test command err: 2025-07-08T11:59:35.072775Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000b88/r3tmp/tmp2YDkMd/pdisk_1.dat 2025-07-08T11:59:35.231282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T11:59:35.249160Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-07-08T11:59:35.297389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:35.297423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:35.309261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:59:35.388228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:59:35.654787Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-07-08T11:59:35.654820Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:675:2569], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-07-08T11:59:35.727870Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:675:2569], subTag: 2} TUpsertActor finished in 0.072991s, errors=0 2025-07-08T11:59:35.727898Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:676:2570] with tag# 2 2025-07-08T11:59:36.786589Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000b88/r3tmp/tmpWbyAmE/pdisk_1.dat 2025-07-08T11:59:36.900841Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T11:59:36.915978Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:36.958476Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:36.958515Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:36.969410Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:59:37.049714Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:59:37.280261Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-07-08T11:59:37.280293Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:675:2569], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" 2025-07-08T11:59:37.354492Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:675:2569], subTag: 2} TUpsertActor finished in 0.074145s, errors=0 2025-07-08T11:59:37.354527Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:676:2570] with tag# 2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> 
TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot [GOOD] Test command err: 2025-07-08T11:59:02.163235Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:59:02.167715Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:59:02.167768Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:59:02.168474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:59:02.168530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:59:02.168565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:59:02.168588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:59:02.168605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:59:02.168624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:59:02.168643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:59:02.168660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:59:02.168676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:59:02.168693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:59:02.168709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:59:02.168725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:59:02.174820Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:59:02.175004Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:59:02.175015Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:59:02.175046Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:02.175105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:59:02.175121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:59:02.175127Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:59:02.175137Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:59:02.175145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:59:02.175153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:59:02.175158Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:59:02.175181Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:02.175189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:59:02.175196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:59:02.175200Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:59:02.175210Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:59:02.175217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:59:02.175256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:59:02.175262Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:59:02.175272Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:59:02.175279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:59:02.175283Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:59:02.175308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:59:02.175315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:59:02.175319Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:59:02.175341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:59:02.175349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:59:02.175353Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:59:02.175367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:59:02.175375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:59:02.175379Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:59:02.175387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:59:02.175395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:59:02.175402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:59:02.175406Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:59:02.175443Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=9; 2025-07-08T11:59:02.175453Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=5; 2025-07-08T11:59:02.175462Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=5; 2025-07-08T11:59:02.175473Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=6; 2025-07-08T11:59:02.175482Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:59:02.175494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:59:02.175502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:59:02.175507Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:59:02.175521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:59:02.175527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;eve ... ion=184; 2025-07-08T11:59:36.793148Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:48;memory_size=110;data_size=60;sum=780800;count=14327; 2025-07-08T11:59:36.793158Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:65;memory_size=206;data_size=172;sum=1468544;count=14328;size_of_meta=112; 2025-07-08T11:59:36.793165Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=244;sum=1984352;count=7164;size_of_portion=184; 2025-07-08T11:59:36.793247Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=12322; 2025-07-08T11:59:36.793260Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=2; 2025-07-08T11:59:36.793431Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=163; 2025-07-08T11:59:36.793439Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=12537; 2025-07-08T11:59:36.793444Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=12549; 2025-07-08T11:59:36.793451Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=2; 2025-07-08T11:59:36.793519Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=63; 2025-07-08T11:59:36.793524Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=12684; 2025-07-08T11:59:36.793562Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=30; 2025-07-08T11:59:36.793581Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=13; 2025-07-08T11:59:36.793649Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=63; 2025-07-08T11:59:36.793699Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=43; 2025-07-08T11:59:36.800770Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=7054; 2025-07-08T11:59:36.811218Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=10413; 2025-07-08T11:59:36.811252Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=4; 2025-07-08T11:59:36.811260Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=2; 2025-07-08T11:59:36.811267Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-07-08T11:59:36.811287Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=14; 2025-07-08T11:59:36.811294Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=0; 2025-07-08T11:59:36.811312Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=13; 2025-07-08T11:59:36.811319Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-07-08T11:59:36.811331Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=7; 2025-07-08T11:59:36.811352Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=14; 2025-07-08T11:59:36.811370Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=13; 2025-07-08T11:59:36.811376Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=31601; 2025-07-08T11:59:36.811421Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=21623968;raw_bytes=21227350;count=3;records=225200} inactive {blob_bytes=141321168;raw_bytes=137674250;count=221;records=1575200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-07-08T11:59:36.811460Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:9671:11273];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SwitchToWork; 2025-07-08T11:59:36.811470Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:9671:11273];process=SwitchToWork;fline=columnshard.cpp:76;event=initialize_shard;step=SignalTabletActive; 2025-07-08T11:59:36.811490Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];process=SwitchToWork;fline=columnshard_impl.cpp:1327;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-07-08T11:59:36.811497Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];process=SwitchToWork;fline=column_engine_logs.cpp:471;event=OnTieringModified;new_count_tierings=0; 2025-07-08T11:59:36.811539Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:36.811559Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=9; 2025-07-08T11:59:36.811574Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975647573;tx_id=18446744073709551615;;current_snapshot_ts=1751975943987; 2025-07-08T11:59:36.811584Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:36.811595Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:36.811600Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:36.811635Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; 2025-07-08T11:59:36.815666Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:248;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-07-08T11:59:36.815774Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:237;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-07-08T11:59:36.815781Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-07-08T11:59:36.815785Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-07-08T11:59:36.815792Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:36.815814Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=9; 2025-07-08T11:59:36.815826Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975647573;tx_id=18446744073709551615;;current_snapshot_ts=1751975943987; 2025-07-08T11:59:36.815835Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:36.815847Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:36.815851Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:36.815870Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=1.000000s; 2025-07-08T11:59:36.815879Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; >> UpsertLoad::ShouldDropCreateTable [GOOD] |65.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot [GOOD] Test command err: 2025-07-08T11:59:03.214160Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 
2025-07-08T11:59:03.218386Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:59:03.218447Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:59:03.219228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:59:03.219288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:59:03.219328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:59:03.219351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:59:03.219369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:59:03.219391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:59:03.219410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:59:03.219427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:59:03.219444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:59:03.219462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:59:03.219479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:59:03.219498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:59:03.225962Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:59:03.226180Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:59:03.226192Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:59:03.226225Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:03.226277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:59:03.226292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:59:03.226297Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:59:03.226307Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:59:03.226316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:59:03.226323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:59:03.226327Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:59:03.226350Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:03.226358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:59:03.226365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:59:03.226369Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:59:03.226379Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:59:03.226386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:59:03.226392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:59:03.226396Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:59:03.226405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:59:03.226413Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:59:03.226417Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:59:03.226439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:59:03.226446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:59:03.226450Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:59:03.226470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:59:03.226478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:59:03.226482Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:59:03.226495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:59:03.226502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:59:03.226506Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:59:03.226514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:59:03.226521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:59:03.226528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:59:03.226532Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:59:03.226572Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=12; 2025-07-08T11:59:03.226582Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=6; 2025-07-08T11:59:03.226590Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=5; 2025-07-08T11:59:03.226602Z node 
1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=8; 2025-07-08T11:59:03.226611Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:59:03.226623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:59:03.226631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:59:03.226636Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:59:03.226649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:59:03.226655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;ev ... _portion=184; 2025-07-08T11:59:37.454646Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:48;memory_size=110;data_size=60;sum=780800;count=14327; 2025-07-08T11:59:37.454653Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:65;memory_size=206;data_size=172;sum=1468544;count=14328;size_of_meta=112; 2025-07-08T11:59:37.454658Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=244;sum=1984352;count=7164;size_of_portion=184; 2025-07-08T11:59:37.454720Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=7872; 2025-07-08T11:59:37.454731Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=1; 2025-07-08T11:59:37.454867Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=128; 2025-07-08T11:59:37.454876Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=8049; 2025-07-08T11:59:37.454882Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=8063; 2025-07-08T11:59:37.454889Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=1; 2025-07-08T11:59:37.454948Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=53; 2025-07-08T11:59:37.454955Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=8196; 2025-07-08T11:59:37.454985Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=22; 2025-07-08T11:59:37.455001Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=12; 2025-07-08T11:59:37.455056Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=49; 2025-07-08T11:59:37.455092Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=30; 2025-07-08T11:59:37.460358Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=5251; 2025-07-08T11:59:37.467107Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=6713; 2025-07-08T11:59:37.467142Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=3; 2025-07-08T11:59:37.467149Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=2; 2025-07-08T11:59:37.467156Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-07-08T11:59:37.467171Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=11; 2025-07-08T11:59:37.467179Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-07-08T11:59:37.467197Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=12; 
2025-07-08T11:59:37.467203Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-07-08T11:59:37.467217Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=8; 2025-07-08T11:59:37.467235Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=12; 2025-07-08T11:59:37.467253Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=11; 2025-07-08T11:59:37.467259Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=21460; 2025-07-08T11:59:37.467305Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=21623968;raw_bytes=21227350;count=3;records=225200} inactive {blob_bytes=141321168;raw_bytes=137674250;count=221;records=1575200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-07-08T11:59:37.467346Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:9671:11273];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SwitchToWork; 2025-07-08T11:59:37.467358Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:9671:11273];process=SwitchToWork;fline=columnshard.cpp:76;event=initialize_shard;step=SignalTabletActive; 2025-07-08T11:59:37.467376Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];process=SwitchToWork;fline=columnshard_impl.cpp:1327;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-07-08T11:59:37.467385Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];process=SwitchToWork;fline=column_engine_logs.cpp:471;event=OnTieringModified;new_count_tierings=0; 2025-07-08T11:59:37.467421Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:37.467441Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=9; 2025-07-08T11:59:37.467456Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975648623;tx_id=18446744073709551615;;current_snapshot_ts=1751975945037; 2025-07-08T11:59:37.467464Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:37.467476Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:37.467482Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:37.467506Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; 2025-07-08T11:59:37.469700Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:9671:11273];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:248;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-07-08T11:59:37.469845Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:237;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-07-08T11:59:37.469852Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-07-08T11:59:37.469856Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-07-08T11:59:37.469864Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:37.469882Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=9; 2025-07-08T11:59:37.469894Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975648623;tx_id=18446744073709551615;;current_snapshot_ts=1751975945037; 2025-07-08T11:59:37.469903Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:37.469913Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:37.469919Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:37.469937Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=1.000000s; 2025-07-08T11:59:37.469947Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9671:11273];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; >> TSchemeShardServerLessReboots::TestServerlessComputeResourcesModeWithReboots >> ReadLoad::ShouldReadKqpMoreThanRows [GOOD] |65.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest >> KqpScanSpilling::SelfJoinQueryService |65.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest |65.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldDropCreateTable [GOOD] Test command err: 2025-07-08T11:59:35.991623Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000b6f/r3tmp/tmpeuOCUH/pdisk_1.dat 2025-07-08T11:59:36.132296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T11:59:36.158003Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:36.197556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:36.197600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:36.208411Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:59:36.307104Z node 1 :DS_LOAD_TEST NOTICE: TLoad# 0 creates table# BrandNewTable in dir# /Root 2025-07-08T11:59:36.355729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:59:36.608403Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# BrandNewTable in dir# /Root with rows# 10 2025-07-08T11:59:36.608737Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:578:2498], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" 2025-07-08T11:59:36.635981Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:578:2498], subTag: 1} TUpsertActor finished in 0.027184s, errors=0 2025-07-08T11:59:36.636086Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "BrandNewTable" CreateTable: true MinParts: 11 MaxParts: 13 MaxPartSizeMb: 1234 } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-07-08T11:59:36.636124Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:578:2498], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" 2025-07-08T11:59:36.697483Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:578:2498], subTag: 3} TUpsertActor finished in 0.061286s, errors=0 2025-07-08T11:59:36.697520Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:683:2576] with tag# 3 2025-07-08T11:59:37.523685Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000b6f/r3tmp/tmpZd2vhp/pdisk_1.dat 2025-07-08T11:59:37.646874Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T11:59:37.662279Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:37.698193Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:37.698232Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:37.713452Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:59:37.804709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:59:38.064285Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 } UpsertBulkStart { RowCount: 100 Inflight: 3 } 2025-07-08T11:59:38.064319Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:675:2569], subTag: 2} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 2025-07-08T11:59:38.507672Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:675:2569], subTag: 2} TUpsertActor finished in 0.443297s, errors=0 2025-07-08T11:59:38.507705Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:676:2570] with tag# 2 2025-07-08T11:59:38.508915Z node 2 :DS_LOAD_TEST NOTICE: TLoad# 0 drops table# table in dir# /Root 2025-07-08T11:59:38.557106Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:59:38.708164Z node 2 :DS_LOAD_TEST NOTICE: TLoad# 0 creates table# table in dir# /Root 2025-07-08T11:59:38.718428Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T11:59:38.768133Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-07-08T11:59:38.932298Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# table in dir# /Root with rows# 10 2025-07-08T11:59:38.932366Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:714:2608], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" 2025-07-08T11:59:38.953283Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:714:2608], subTag: 1} TUpsertActor finished in 0.020862s, errors=0 2025-07-08T11:59:38.953389Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "table" DropTable: true } TargetShard { TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-07-08T11:59:38.953413Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:714:2608], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" 2025-07-08T11:59:39.013181Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:714:2608], subTag: 3} TUpsertActor finished in 0.059709s, errors=0 2025-07-08T11:59:39.013211Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:857:2723] with tag# 3 |65.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest >> KqpScanSpilling::SelfJoin ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> 
TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot [GOOD] Test command err: 2025-07-08T11:59:00.380443Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:59:00.384475Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:59:00.384520Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:59:00.385150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:59:00.385204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:59:00.385236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:59:00.385258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:59:00.385275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:59:00.385295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:59:00.385314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:59:00.385332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:59:00.385349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:59:00.385367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:59:00.385384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:59:00.385402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:59:00.391261Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:59:00.391429Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:59:00.391440Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:59:00.391466Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:00.391504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:59:00.391515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:59:00.391520Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:59:00.391529Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:59:00.391537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:59:00.391544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:59:00.391547Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:59:00.391562Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:00.391567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:59:00.391572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:59:00.391575Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:59:00.391581Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:59:00.391585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:59:00.391590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:59:00.391592Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:59:00.391598Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:59:00.391603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:59:00.391607Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:59:00.391621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:59:00.391625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:59:00.391628Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:59:00.391639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:59:00.391644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:59:00.391646Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:59:00.391654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:59:00.391659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:59:00.391661Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:59:00.391666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:59:00.391670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:59:00.391674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:59:00.391676Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:59:00.391703Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=8; 2025-07-08T11:59:00.391712Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=6; 2025-07-08T11:59:00.391718Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2025-07-08T11:59:00.391725Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=6; 2025-07-08T11:59:00.391732Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:59:00.391739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:59:00.391745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:59:00.391748Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:59:00.391756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:59:00.391759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;eve ... _portion=184; 2025-07-08T11:59:37.526773Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:48;memory_size=110;data_size=68;sum=787448;count=14327; 2025-07-08T11:59:37.526781Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:65;memory_size=206;data_size=180;sum=1475192;count=14328;size_of_meta=112; 2025-07-08T11:59:37.526787Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=252;sum=1991000;count=7164;size_of_portion=184; 2025-07-08T11:59:37.526853Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=7455; 2025-07-08T11:59:37.526865Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=3; 2025-07-08T11:59:37.527010Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=138; 2025-07-08T11:59:37.527017Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=7640; 2025-07-08T11:59:37.527022Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=7652; 2025-07-08T11:59:37.527030Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=2; 2025-07-08T11:59:37.527099Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=65; 2025-07-08T11:59:37.527105Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=7787; 2025-07-08T11:59:37.527140Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=26; 2025-07-08T11:59:37.527156Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=12; 2025-07-08T11:59:37.527215Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=52; 2025-07-08T11:59:37.527252Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=33; 2025-07-08T11:59:37.533343Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=6072; 2025-07-08T11:59:37.541388Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=8002; 2025-07-08T11:59:37.541424Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=3; 2025-07-08T11:59:37.541431Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2025-07-08T11:59:37.541438Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=0; 2025-07-08T11:59:37.541453Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=11; 2025-07-08T11:59:37.541460Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=0; 2025-07-08T11:59:37.541478Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=13; 2025-07-08T11:59:37.541484Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-07-08T11:59:37.541499Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=7; 2025-07-08T11:59:37.541516Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=11; 2025-07-08T11:59:37.541532Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=12; 2025-07-08T11:59:37.541537Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=23171; 2025-07-08T11:59:37.541581Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=22538992;raw_bytes=22128150;count=3;records=225200} inactive {blob_bytes=147791880;raw_bytes=143975050;count=221;records=1575200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-07-08T11:59:37.541618Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:9968:11570];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SwitchToWork; 2025-07-08T11:59:37.541628Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:9968:11570];process=SwitchToWork;fline=columnshard.cpp:76;event=initialize_shard;step=SignalTabletActive; 2025-07-08T11:59:37.541646Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];process=SwitchToWork;fline=columnshard_impl.cpp:1327;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-07-08T11:59:37.541653Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];process=SwitchToWork;fline=column_engine_logs.cpp:471;event=OnTieringModified;new_count_tierings=0; 2025-07-08T11:59:37.541692Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:37.541710Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=9; 2025-07-08T11:59:37.541724Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975645934;tx_id=18446744073709551615;;current_snapshot_ts=1751975942209; 2025-07-08T11:59:37.541732Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:37.541741Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:37.541746Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:37.541772Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; 2025-07-08T11:59:37.543117Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:248;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-07-08T11:59:37.543208Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:237;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-07-08T11:59:37.543214Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-07-08T11:59:37.543217Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-07-08T11:59:37.543223Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:37.543241Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=9; 2025-07-08T11:59:37.543251Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975645934;tx_id=18446744073709551615;;current_snapshot_ts=1751975942209; 2025-07-08T11:59:37.543257Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:37.543265Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:37.543269Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:37.543284Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=1.000000s; 2025-07-08T11:59:37.543291Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; >> LocalPartition::DiscoveryServiceBadNodeId [GOOD] >> LocalPartition::DescribeHang >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling |65.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest >> DataShardVolatile::DistributedWriteShardRestartBeforePlan-UseSink [GOOD] >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation+UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> 
ReadLoad::ShouldReadKqpMoreThanRows [GOOD] Test command err: 2025-07-08T11:59:34.983348Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000ba2/r3tmp/tmpsQSuPk/pdisk_1.dat 2025-07-08T11:59:35.118102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T11:59:35.140670Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:35.181503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:35.181541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:35.193404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:59:35.281621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:59:35.498949Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# usertable in dir# /Root with rows# 100 2025-07-08T11:59:35.499209Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:675:2569], subTag: 1} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" 2025-07-08T11:59:35.524534Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:675:2569], subTag: 1} TUpsertActor finished in 0.025280s, errors=0 2025-07-08T11:59:35.524629Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kReadKqpStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } ReadKqpStart { RowCount: 100 Inflights: 10 } 2025-07-08T11:59:35.524645Z node 1 :DS_LOAD_TEST NOTICE: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:675:2569], subTag: 3} Bootstrap called: RowCount: 100 Inflights: 10 2025-07-08T11:59:35.524930Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:675:2569], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 2 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2025-07-08T11:59:35.524973Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:675:2569], subTag: 3} started fullscan actor# [1:687:2581] 2025-07-08T11:59:35.524984Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:684:2578], subTag: 1} Bootstrap called, sample# 100 2025-07-08T11:59:35.524987Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:684:2578], subTag: 1} Connect to# 72075186224037888 called 2025-07-08T11:59:35.525170Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:684:2578], subTag: 1} Handle TEvClientConnected called, Status# OK 2025-07-08T11:59:35.525375Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:684:2578], subTag: 1} finished in 0.000184s, sampled# 100, iter finished# 1, oks# 100 2025-07-08T11:59:35.525395Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActorMultiSession# {Tag: 0, 
parent: [1:675:2569], subTag: 3} received keyCount# 100 2025-07-08T11:59:35.525421Z node 1 :DS_LOAD_TEST NOTICE: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:675:2569], subTag: 3} started# 10 actors each with inflight# 1 2025-07-08T11:59:35.525431Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 2} Bootstrap called 2025-07-08T11:59:35.525435Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 2} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-07-08T11:59:35.525442Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 3} Bootstrap called 2025-07-08T11:59:35.525445Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 3} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-07-08T11:59:35.525449Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 4} Bootstrap called 2025-07-08T11:59:35.525452Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 4} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-07-08T11:59:35.525456Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 5} Bootstrap called 2025-07-08T11:59:35.525458Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 5} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-07-08T11:59:35.525462Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 6} Bootstrap called 2025-07-08T11:59:35.525465Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 6} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-07-08T11:59:35.525469Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 7} Bootstrap called 2025-07-08T11:59:35.525472Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 7} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-07-08T11:59:35.525475Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 8} Bootstrap called 2025-07-08T11:59:35.525480Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 8} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-07-08T11:59:35.525484Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 9} Bootstrap called 2025-07-08T11:59:35.525487Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 9} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-07-08T11:59:35.525491Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 10} Bootstrap called 2025-07-08T11:59:35.525494Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 10} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-07-08T11:59:35.525498Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 11} Bootstrap called 2025-07-08T11:59:35.525501Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 11} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-07-08T11:59:35.526042Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 2} session: 
ydb://session/3?node_id=1&id=ZGI4NGEyM2EtNTlhZTk4YTEtYTYyNGM2Y2QtMjk0Mjc5ODA= 2025-07-08T11:59:35.526117Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 3} session: ydb://session/3?node_id=1&id=MmMwMjgwODUtZDIwOTRiNzMtNDFiNmIxMGEtNjAzODJkNzQ= 2025-07-08T11:59:35.526300Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 4} session: ydb://session/3?node_id=1&id=YmEyNGQ2NWQtZDEyZmMwMGEtOTE4NjM0MmEtYjBiM2ViOGM= 2025-07-08T11:59:35.526467Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 5} session: ydb://session/3?node_id=1&id=YzUzNGM3ZWYtYjY4ZTZhM2ItZDFkYjUzOC00YzhlZTVkNg== 2025-07-08T11:59:35.526632Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 6} session: ydb://session/3?node_id=1&id=ZDJkYzc4ZjQtZjZhMTZhMTEtNDQxZTg1OWEtZTRlNWUyNzI= 2025-07-08T11:59:35.526798Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 7} session: ydb://session/3?node_id=1&id=ODRkZDE5YjctMTNhMjkyODUtOWVkODQyMjMtOGVjZjkyZWM= 2025-07-08T11:59:35.526978Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 8} session: ydb://session/3?node_id=1&id=MjcwMzUyMjgtYjlkMjkwMWQtZmIwM2M2OWMtZWYxMzUxY2Y= 2025-07-08T11:59:35.527134Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 9} session: ydb://session/3?node_id=1&id=NmE1NGQ2YzQtOWJmN2IwOWYtN2U0OTExY2MtZGM3MjNlMjY= 2025-07-08T11:59:35.527297Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 10} session: ydb://session/3?node_id=1&id=YmY1YjMzOTItNTM1ZDc4NGEtZTdlODk2MDItMmNlY2RmNmM= 2025-07-08T11:59:35.527465Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 11} session: ydb://session/3?node_id=1&id=M2M2ODY0ODktY2M3NjA2MzYtZGYzYzY1ODItMzgzMTVkZg== 2025-07-08T11:59:36.271976Z node 1 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 2} finished in 0.745909s, errors=0 2025-07-08T11:59:36.272090Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:675:2569], subTag: 3} finished: 2 { Tag: 2 DurationMs: 745 OperationsOK: 100 OperationsError: 0 } 2025-07-08T11:59:36.272323Z node 1 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 3} finished in 0.746199s, errors=0 2025-07-08T11:59:36.272491Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:675:2569], subTag: 3} finished: 3 { Tag: 3 DurationMs: 746 OperationsOK: 100 OperationsError: 0 } 2025-07-08T11:59:36.272501Z node 1 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 4} finished in 0.746196s, errors=0 2025-07-08T11:59:36.272667Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:675:2569], subTag: 3} finished: 4 { Tag: 4 DurationMs: 746 OperationsOK: 100 OperationsError: 0 } 2025-07-08T11:59:36.272851Z node 1 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 5} finished in 0.746377s, errors=0 2025-07-08T11:59:36.273048Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:675:2569], subTag: 3} finished: 5 { Tag: 5 DurationMs: 746 OperationsOK: 100 OperationsError: 0 } 2025-07-08T11:59:36.273059Z node 1 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 6} finished in 0.746422s, errors=0 2025-07-08T11:59:36.273173Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: 
[1:675:2569], subTag: 3} finished: 6 { Tag: 6 DurationMs: 746 OperationsOK: 100 OperationsError: 0 } 2025-07-08T11:59:36.273287Z node 1 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 7} finished in 0.746483s, errors=0 2025-07-08T11:59:36.273295Z node 1 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 8} finished in 0.746313s, errors=0 2025-07-08T11:59:36.273377Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:675:2569], subTag: 3} finished: 7 { Tag: 7 DurationMs: 746 OperationsOK: 100 OperationsError: 0 } 2025-07-08T11:59:36.273460Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:675:2569], subTag: 3} finished: 8 { Tag: 8 DurationMs: 746 OperationsOK: 100 OperationsError: 0 } 2025-07-08T11:59:36.273467Z node 1 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 9} finished in 0.746328s, errors=0 2025-07-08T11:59:36.273579Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:675:2569], subTag: 3} finished: 9 { Tag: 9 DurationMs: 746 OperationsOK: 100 OperationsError: 0 } 2025-07-08T11:59:36.273586Z node 1 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [1:684:2578], subTag: 10} finished in 0.746284s, errors=0 2025-07-08T11:59:36.273641Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:675:25 ... Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:37.313689Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:59:37.394735Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:59:37.669319Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# usertable in dir# /Root with rows# 10 2025-07-08T11:59:37.669403Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:675:2569], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" 2025-07-08T11:59:37.693359Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:675:2569], subTag: 1} TUpsertActor finished in 0.023892s, errors=0 2025-07-08T11:59:37.693469Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kReadKqpStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } ReadKqpStart { RowCount: 10 ReadCount: 100 Inflights: 10 } 2025-07-08T11:59:37.693488Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:675:2569], subTag: 3} Bootstrap called: RowCount: 10 ReadCount: 100 Inflights: 10 2025-07-08T11:59:37.693822Z node 2 :DS_LOAD_TEST INFO: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:675:2569], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 2 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2025-07-08T11:59:37.693845Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:675:2569], subTag: 3} started fullscan actor# [2:687:2581] 2025-07-08T11:59:37.693857Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:684:2578], subTag: 1} 
Bootstrap called, sample# 10 2025-07-08T11:59:37.693861Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:684:2578], subTag: 1} Connect to# 72075186224037888 called 2025-07-08T11:59:37.693916Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:684:2578], subTag: 1} Handle TEvClientConnected called, Status# OK 2025-07-08T11:59:37.694083Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:684:2578], subTag: 1} finished in 0.000157s, sampled# 10, iter finished# 1, oks# 10 2025-07-08T11:59:37.694102Z node 2 :DS_LOAD_TEST INFO: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:675:2569], subTag: 3} received keyCount# 10 2025-07-08T11:59:37.694133Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:675:2569], subTag: 3} started# 10 actors each with inflight# 1 2025-07-08T11:59:37.694139Z node 2 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 2} Bootstrap called 2025-07-08T11:59:37.694144Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 2} sends event for session creation to proxy: [2:8678280833929343339:121] 2025-07-08T11:59:37.694151Z node 2 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 3} Bootstrap called 2025-07-08T11:59:37.694154Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 3} sends event for session creation to proxy: [2:8678280833929343339:121] 2025-07-08T11:59:37.694159Z node 2 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 4} Bootstrap called 2025-07-08T11:59:37.694162Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 4} sends event for session creation to proxy: [2:8678280833929343339:121] 2025-07-08T11:59:37.694166Z node 2 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 5} Bootstrap called 2025-07-08T11:59:37.694169Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 5} sends event for session creation to proxy: [2:8678280833929343339:121] 2025-07-08T11:59:37.694173Z node 2 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 6} Bootstrap called 2025-07-08T11:59:37.694177Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 6} sends event for session creation to proxy: [2:8678280833929343339:121] 2025-07-08T11:59:37.694181Z node 2 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 7} Bootstrap called 2025-07-08T11:59:37.694185Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 7} sends event for session creation to proxy: [2:8678280833929343339:121] 2025-07-08T11:59:37.694189Z node 2 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 8} Bootstrap called 2025-07-08T11:59:37.694196Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 8} sends event for session creation to proxy: [2:8678280833929343339:121] 2025-07-08T11:59:37.694200Z node 2 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 9} Bootstrap called 2025-07-08T11:59:37.694204Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 9} sends event for session creation to proxy: [2:8678280833929343339:121] 2025-07-08T11:59:37.694208Z node 2 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 10} Bootstrap called 2025-07-08T11:59:37.694212Z node 2 
:DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 10} sends event for session creation to proxy: [2:8678280833929343339:121] 2025-07-08T11:59:37.694217Z node 2 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 11} Bootstrap called 2025-07-08T11:59:37.694219Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 11} sends event for session creation to proxy: [2:8678280833929343339:121] 2025-07-08T11:59:37.694679Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 2} session: ydb://session/3?node_id=2&id=MzhmZDdlYzItYWQwNjE4NzgtMmNmOTNlM2EtYzA1ZTI3OTA= 2025-07-08T11:59:37.694982Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 3} session: ydb://session/3?node_id=2&id=NWEwMzMyMzAtYzI1ZWEwMWUtZWRmYzdhZGItYWIwYWNiMjQ= 2025-07-08T11:59:37.695203Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 4} session: ydb://session/3?node_id=2&id=ZWM5YmYxODAtZTZiY2IwMjgtODU4OGQ3OWItYmYyNDJhNDk= 2025-07-08T11:59:37.695420Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 5} session: ydb://session/3?node_id=2&id=MTNjZGZjYTktYzhlZTIyMmYtOWI0OGY5OTEtNDU0NTdhY2E= 2025-07-08T11:59:37.695650Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 6} session: ydb://session/3?node_id=2&id=OGQ5OTk2NGUtMjhlZjA0NWItZDNiYTkyNTktMjRmMmFjZTM= 2025-07-08T11:59:37.695850Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 7} session: ydb://session/3?node_id=2&id=OTI2M2Y5YWItYTZhMzI4NTktZWVkNmFjNTctYjJlOWYxYjA= 2025-07-08T11:59:37.696043Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 8} session: ydb://session/3?node_id=2&id=ZGEzYzU0NTMtZWZjNjYxN2UtNGNkNTFiNjQtMjQ4NTdlMTk= 2025-07-08T11:59:37.696241Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 9} session: ydb://session/3?node_id=2&id=NDk2ZWQ2NTAtMTAwYzljYTgtZWYzYzAzNjgtMzBhOTc4Mg== 2025-07-08T11:59:37.696439Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 10} session: ydb://session/3?node_id=2&id=MmVhYmE1MTgtMjBhYjZjNDUtY2Q4MGE1ZTUtZTEyMzJjN2U= 2025-07-08T11:59:37.696634Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 11} session: ydb://session/3?node_id=2&id=ZWUyZGM0YjEtNjA4YmEzMTUtNGQ5MWNiYzItZDJiNjY1YWU= 2025-07-08T11:59:39.330959Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 2} finished in 1.636244s, errors=0 2025-07-08T11:59:39.331415Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:675:2569], subTag: 3} finished: 2 { Tag: 2 DurationMs: 1636 OperationsOK: 100 OperationsError: 0 } 2025-07-08T11:59:39.331433Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 3} finished in 1.636441s, errors=0 2025-07-08T11:59:39.331441Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 4} finished in 1.636231s, errors=0 2025-07-08T11:59:39.331500Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:675:2569], subTag: 3} finished: 3 { Tag: 3 DurationMs: 1636 OperationsOK: 100 OperationsError: 0 } 2025-07-08T11:59:39.331686Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:675:2569], subTag: 3} finished: 4 { Tag: 4 DurationMs: 1636 OperationsOK: 100 OperationsError: 0 } 
2025-07-08T11:59:39.331856Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 5} finished in 1.636426s, errors=0 2025-07-08T11:59:39.332030Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:675:2569], subTag: 3} finished: 5 { Tag: 5 DurationMs: 1636 OperationsOK: 100 OperationsError: 0 } 2025-07-08T11:59:39.332038Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 6} finished in 1.636380s, errors=0 2025-07-08T11:59:39.332161Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:675:2569], subTag: 3} finished: 6 { Tag: 6 DurationMs: 1636 OperationsOK: 100 OperationsError: 0 } 2025-07-08T11:59:39.332168Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 7} finished in 1.636310s, errors=0 2025-07-08T11:59:39.332263Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:675:2569], subTag: 3} finished: 7 { Tag: 7 DurationMs: 1636 OperationsOK: 100 OperationsError: 0 } 2025-07-08T11:59:39.332271Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 8} finished in 1.636220s, errors=0 2025-07-08T11:59:39.332361Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:675:2569], subTag: 3} finished: 8 { Tag: 8 DurationMs: 1636 OperationsOK: 100 OperationsError: 0 } 2025-07-08T11:59:39.332431Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 9} finished in 1.636181s, errors=0 2025-07-08T11:59:39.332487Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:675:2569], subTag: 3} finished: 9 { Tag: 9 DurationMs: 1636 OperationsOK: 100 OperationsError: 0 } 2025-07-08T11:59:39.332554Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 10} finished in 1.636108s, errors=0 2025-07-08T11:59:39.332628Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:675:2569], subTag: 3} finished: 10 { Tag: 10 DurationMs: 1636 OperationsOK: 100 OperationsError: 0 } 2025-07-08T11:59:39.332668Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:684:2578], subTag: 11} finished in 1.636026s, errors=0 2025-07-08T11:59:39.332711Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:675:2569], subTag: 3} finished: 11 { Tag: 11 DurationMs: 1636 OperationsOK: 100 OperationsError: 0 } 2025-07-08T11:59:39.332717Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:675:2569], subTag: 3} finished in 1.638608s, oks# 1000, errors# 0 2025-07-08T11:59:39.332770Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:684:2578] with tag# 3 >> KqpScanSpilling::HandleErrorsCorrectly |65.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |65.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |65.6%| [TA] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... 
results_accumulator.log} |65.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SelfJoin [GOOD] >> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling >> KqpScanSpilling::SelfJoinQueryService [GOOD] >> TColumnShardTestReadWrite::CompactionGCFailingBs [GOOD] >> TNetClassifierTest::TestInitFromRemoteSource >> KqpScanSpilling::SpillingPragmaParseError >> TBsProxyFaultToleranceTest::CheckTRangeFaultToleranceTestErasureMirror3dc [GOOD] |65.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/address_classification/ut/unittest |65.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/address_classification/ut/unittest |65.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/spilling/unittest |65.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/address_classification/ut/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 [GOOD] |65.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/spilling/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SelfJoinQueryService [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/43nv/001ce5/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk2 Trying to start YDB, gRPC: 17905, MsgBus: 7179 2025-07-08T11:59:40.074928Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679538553654622:2081];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:59:40.076520Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001ce5/r3tmp/tmpuVDYdy/pdisk_1.dat 2025-07-08T11:59:40.181085Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:40.197751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:40.197780Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:40.201976Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17905, node 1 2025-07-08T11:59:40.236127Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:59:40.236144Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:59:40.236146Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:59:40.236191Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7179 TClient is connected to server localhost:7179 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:59:40.447740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:59:40.462497Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:59:40.470003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:59:40.518707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:59:40.608413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:59:40.653991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:59:40.842535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:59:40.869574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:59:40.936889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T11:59:40.969114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T11:59:41.001111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T11:59:41.021632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T11:59:41.037532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T11:59:41.065198Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (DataType 'String)) (let $5 (OptionalType $4)) (let $6 (StructType '('"Key" $3) '('"Value" $5))) (let $7 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($21) (block '( (let $22 (lambda '($23) (block '( (let $24 (VariantType (TupleType $6 $6))) (let $25 (Variant $23 '0 $24)) (let $26 (Variant $23 '1 $24)) (return $25 $26) )))) (return (FromFlow (MultiMap (ToFlow $21) $22))) ))) '('('"_logical_id" '706) '('"_id" '"ff05c593-2a8ae39a-d14fc7c8-fbff187e")))) (let $8 (DqCnUnionAll (TDqOutput $7 '1))) (let $9 '('('"_logical_id" '551) '('"_id" '"2358232a-e00da43f-d8a3bbea-61f970a1") '('"_wide_channels" $6))) (let $10 (DqPhyStage '($8) (lambda '($27) (block '( (let $28 (lambda '($29) (Member $29 '"Key") (Member $29 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $27) $28))) ))) $9)) (let $11 (DqCnMap (TDqOutput $7 '0))) (let $12 (DqCnBroadcast (TDqOutput $10 '0))) (let $13 (StructType '('"t1.Key" $3) '('"t1.Value" $5) '('"t2.Key" $3) '('"t2.Value" $5))) (let $14 '('('"_logical_id" '621) '('"_id" '"a40bd6b5-b7ac5c9b-f6ad905f-e12b0017") '('"_wide_channels" $13))) (let $15 (DqPhyStage '($11 $12) (lambda '($30 $31) (block '( (let $32 '('Many 'Hashed 'Compact)) (let $33 (SqueezeToDict (NarrowFlatMap (WideFilter (ToFlow $31) (lambda '($36 $37) (Exists $37))) (lambda '($38 $39) (IfPresent $39 (lambda '($40) (Just '($40 (AsStruct '('"Key" $38) '('"Value" $39))))) (Nothing (OptionalType (TupleType $4 $6)))))) (lambda '($41) (Nth $41 '0)) (lambda '($42) (Nth $42 '1)) $32)) (let $34 (Sort (FlatMap $33 (lambda '($43) (block '( (let $44 '('"Value")) (let $45 '('"Key" '"t1.Key" '"Value" '"t1.Value")) 
(let $46 '('"Key" '"t2.Key" '"Value" '"t2.Value")) (return (MapJoinCore (OrderedFilter (ToFlow $30) (lambda '($47) (Exists (Member $47 '"Value")))) $43 'Inner $44 $44 $45 $46 '('"t1.Value") '('"t2.Value"))) )))) (Bool 'true) (lambda '($48) (Member $48 '"t1.Key")))) (let $35 (lambda '($49) (Member $49 '"t1.Key") (Member $49 '"t1.Value") (Member $49 '"t2.Key") (Member $49 '"t2.Value"))) (return (FromFlow (ExpandMap $34 $35))) ))) $14)) (let $16 (DqCnMerge (TDqOutput $15 '0) '('('0 '"Asc")))) (let $17 (DqPhyStage '($16) (lambda '($50) (FromFlow (NarrowMap (ToFlow $50) (lambda '($51 $52 $53 $54) (AsStruct '('"t1.Key" $51) '('"t1.Value" $52) '('"t2.Key" $53) '('"t2.Value" $54)))))) '('('"_logical_id" '633) '('"_id" '"d1baf165-fc2a2e4d-2cabbf0c-b484f020")))) (let $18 '($7 $10 $15 $17)) (let $19 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $20 (DqCnResult (TDqOutput $17 '0) $19)) (return (KqpPhysicalQuery '((KqpPhysicalTx $18 '($20) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $13) '0 '0)) '('('"type" '"query")))) ) >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation+UseSink [GOOD] >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation-UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SelfJoin [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/43nv/001cea/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk1 Trying to start YDB, gRPC: 21452, MsgBus: 9298 2025-07-08T11:59:40.195628Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679539986647818:2223];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:59:40.195655Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001cea/r3tmp/tmpJysfa7/pdisk_1.dat 2025-07-08T11:59:40.292844Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21452, node 1 2025-07-08T11:59:40.337314Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:40.337343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:40.341144Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:59:40.341156Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:59:40.341158Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:59:40.341200Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T11:59:40.341430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9298 TClient is connected to server localhost:9298 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:59:40.541931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:59:40.547811Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:59:40.559154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:59:40.631080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T11:59:40.672296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:59:40.697380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T11:59:41.106549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:59:41.126521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:59:41.148626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T11:59:41.171385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T11:59:41.187452Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:59:41.189769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T11:59:41.208913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T11:59:41.223117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T11:59:41.826052Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:148;event=channel_info;ch_size=50;ch_count=1;ch_limit=50;inputs=0;input_channels_count=0; 2025-07-08T11:59:41.826124Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:148;event=channel_info;ch_size=50;ch_count=2;ch_limit=50;inputs=1;input_channels_count=1; 2025-07-08T11:59:41.826157Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:148;event=channel_info;ch_size=50;ch_count=2;ch_limit=50;inputs=2;input_channels_count=2; 2025-07-08T11:59:41.826168Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:148;event=channel_info;ch_size=50;ch_count=2;ch_limit=50;inputs=1;input_channels_count=1; 2025-07-08T11:59:41.826371Z node 1 :KQP_COMPUTE DEBUG: kqp_scan_fetcher_actor.cpp:48 :META:Table { TableId { OwnerId: 72057594046644480 TableId: 6 } TablePath: "/Root/KeyValue" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Columns { Id: 1 Name: "Key" Type: 4 } Columns { Id: 2 Name: "Value" Type: 4097 } KeyColumnTypes: 4 Reads { ShardId: 72075186224037911 KeyRanges { From: "\001\000\000\000\000\200" To: "" FromInclusive: true ToInclusive: false } } ItemsLimit: 0 Reverse: false DataFormat: FORMAT_CELLVEC EnableShardsSequentialScan: true KeyColumnTypeInfos { } ReadType: ROWS OptionalSorting: 1 2025-07-08T11:59:41.826389Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617688:2503], TxId: 281474976715681, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. 
TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. Start compute actor [1:7524679544281617688:2503], task: 1 2025-07-08T11:59:41.826395Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617688:2503], TxId: 281474976715681, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. Set periodic stats 1.000000s 2025-07-08T11:59:41.826400Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617688:2503], TxId: 281474976715681, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. EVLOGKQP START 2025-07-08T11:59:41.826687Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617689:2504], TxId: 281474976715681, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : . }. Start compute actor [1:7524679544281617689:2504], task: 2 2025-07-08T11:59:41.826691Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617689:2504], TxId: 281474976715681, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : . }. Set periodic stats 1.000000s 2025-07-08T11:59:41.826798Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617689:2504], TxId: 281474976715681, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : . }. CA StateFunc 271646922 2025-07-08T11:59:41.828125Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617689:2504], TxId: 281474976715681, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : . }. Send stats to executor actor [1:7524679544281617684:2499] TaskId: 2 Stats: CpuTimeUs: 116 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 93 ComputeCpuTimeUs: 6 BuildCpuTimeUs: 87 HostName: "ghrun-3z2hjo4icm" NodeId: 1 CreateTimeMs: 1751975981826 CurrentWaitInputTimeUs: 10 UpdateTimeMs: 1751975981826 } MaxMemoryUsage: 104857600 2025-07-08T11:59:41.828144Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617690:2505], TxId: 281474976715681, task: 3. Ctx: { TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. Start compute actor [1:7524679544281617690:2505], task: 3 2025-07-08T11:59:41.828149Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617690:2505], TxId: 281474976715681, task: 3. Ctx: { TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. 
PoolId : . Database : /Root. }. Set periodic stats 1.000000s 2025-07-08T11:59:41.828580Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617690:2505], TxId: 281474976715681, task: 3. Ctx: { TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. CA StateFunc 271646922 2025-07-08T11:59:41.828640Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617690:2505], TxId: 281474976715681, task: 3. Ctx: { TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. Send stats to execut ... RiZDM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. CA StateFunc 271646922 2025-07-08T11:59:41.933755Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617691:2506], TxId: 281474976715681, task: 4. Ctx: { TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. CA StateFunc 271646922 2025-07-08T11:59:41.933815Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617691:2506], TxId: 281474976715681, task: 4. Ctx: { TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. CA StateFunc 271646922 2025-07-08T11:59:41.933850Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617691:2506], TxId: 281474976715681, task: 4. Ctx: { TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. CA StateFunc 271646922 2025-07-08T11:59:41.933856Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617691:2506], TxId: 281474976715681, task: 4. Ctx: { TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. CA StateFunc 271646922 2025-07-08T11:59:41.934198Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617690:2505], TxId: 281474976715681, task: 3. Ctx: { TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. CA StateFunc 271646927 2025-07-08T11:59:41.934201Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617690:2505], TxId: 281474976715681, task: 3. Ctx: { TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. CA StateFunc 271646922 2025-07-08T11:59:41.934207Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617690:2505], TxId: 281474976715681, task: 3. Ctx: { TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. CustomerSuppliedId : . CurrentExecutionId : . 
DatabaseId : /Root. PoolId : . Database : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-07-08T11:59:41.934263Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617691:2506], TxId: 281474976715681, task: 4. Ctx: { TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. CA StateFunc 271646923 2025-07-08T11:59:41.934269Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715681, task: 4. Finish input channelId: 4, from: [1:7524679544281617690:2505] 2025-07-08T11:59:41.934274Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617691:2506], TxId: 281474976715681, task: 4. Ctx: { TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. CA StateFunc 271646922 2025-07-08T11:59:41.934279Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617690:2505], TxId: 281474976715681, task: 3. Ctx: { TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. CA StateFunc 271646927 2025-07-08T11:59:41.934281Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617690:2505], TxId: 281474976715681, task: 3. Ctx: { TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. CA StateFunc 271646922 2025-07-08T11:59:41.934287Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715681, task: 3. Tasks execution finished, don't wait for ack delivery in input channelId: 2, seqNo: [10] 2025-07-08T11:59:41.934289Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715681, task: 3. Tasks execution finished, don't wait for ack delivery in input channelId: 3, seqNo: [11] 2025-07-08T11:59:41.934290Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715681, task: 3. Tasks execution finished 2025-07-08T11:59:41.934307Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617690:2505], TxId: 281474976715681, task: 3. Ctx: { TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. Compute state finished. All channels and sinks finished 2025-07-08T11:59:41.934328Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715681, task: 3. pass away 2025-07-08T11:59:41.934358Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715681;task_id=3;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-07-08T11:59:41.934448Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617691:2506], TxId: 281474976715681, task: 4. Ctx: { TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. CA StateFunc 271646922 2025-07-08T11:59:41.934534Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617691:2506], TxId: 281474976715681, task: 4. Ctx: { TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. CA StateFunc 271646922 2025-07-08T11:59:41.934543Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617691:2506], TxId: 281474976715681, task: 4. Ctx: { TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. CA StateFunc 271646922 2025-07-08T11:59:41.934627Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617691:2506], TxId: 281474976715681, task: 4. Ctx: { TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. CA StateFunc 271646922 2025-07-08T11:59:41.934663Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617691:2506], TxId: 281474976715681, task: 4. Ctx: { TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. CA StateFunc 271646922 2025-07-08T11:59:41.934673Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617691:2506], TxId: 281474976715681, task: 4. Ctx: { TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. CA StateFunc 271646922 2025-07-08T11:59:41.934676Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617691:2506], TxId: 281474976715681, task: 4. Ctx: { TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-07-08T11:59:41.934730Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617691:2506], TxId: 281474976715681, task: 4. Ctx: { TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. CA StateFunc 271646922 2025-07-08T11:59:41.934732Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617691:2506], TxId: 281474976715681, task: 4. Ctx: { TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. 
Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-07-08T11:59:41.934767Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617691:2506], TxId: 281474976715681, task: 4. Ctx: { TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. CA StateFunc 271646922 2025-07-08T11:59:41.934771Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617691:2506], TxId: 281474976715681, task: 4. Ctx: { TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-07-08T11:59:41.934809Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617691:2506], TxId: 281474976715681, task: 4. Ctx: { TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. CA StateFunc 271646922 2025-07-08T11:59:41.934813Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715681, task: 4. Tasks execution finished, don't wait for ack delivery in input channelId: 4, seqNo: [11] 2025-07-08T11:59:41.934814Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715681, task: 4. Tasks execution finished 2025-07-08T11:59:41.934816Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7524679544281617691:2506], TxId: 281474976715681, task: 4. Ctx: { TraceId : 01jzmyfwp6ag26h25nsmb1bpa3. SessionId : ydb://session/3?node_id=1&id=MWI1MDdkNDQtMjFmMmY4YzMtMTE2MDE5NzctZTI2MjRiZDM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. Compute state finished. All channels and sinks finished 2025-07-08T11:59:41.934824Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715681, task: 4. pass away 2025-07-08T11:59:41.934836Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715681;task_id=4;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-07-08T11:59:41.935122Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751975981868, txId: 281474976715680] shutting down |65.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/spilling/unittest |65.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTRangeFaultToleranceTestErasureMirror3dc [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionGCFailingBs [GOOD] Test command err: 2025-07-08T11:59:01.285771Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:59:01.289966Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:59:01.290028Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:59:01.290815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:59:01.290888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:59:01.290925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:59:01.290948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:59:01.290966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:59:01.290985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:59:01.291008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:59:01.291026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:59:01.291043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:59:01.291060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:59:01.291094Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:59:01.291113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:59:01.296208Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:59:01.296384Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:59:01.296395Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:59:01.296420Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:01.296464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:59:01.296481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:59:01.296487Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:59:01.296497Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:59:01.296504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:59:01.296510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:59:01.296513Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:59:01.296529Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:01.296535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:59:01.296541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:59:01.296544Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:59:01.296551Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:59:01.296556Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:59:01.296561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:59:01.296563Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:59:01.296569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:59:01.296575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:59:01.296577Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:59:01.296593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:59:01.296599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:59:01.296602Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:59:01.296617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:59:01.296622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:59:01.296625Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:59:01.296634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:59:01.296638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:59:01.296641Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:59:01.296646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:59:01.296651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:59:01.296655Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:59:01.296658Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:59:01.296690Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=9; 2025-07-08T11:59:01.296700Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=7; 2025-07-08T11:59:01.296706Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2025-07-08T11:59:01.296714Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=7; 2025-07-08T11:59:01.296722Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:59:01.296732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:59:01.296738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:59:01.296742Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:59:01.296750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:59:01.296754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;eve ... 
COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:290:2301];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Compiled;task_id=fbfdcaaa-5bf211f0-b0444e8f-c5025aa8; 2025-07-08T11:59:21.641907Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[247] (CS::CLEANUP::PORTIONS) apply at tablet 9437184 2025-07-08T11:59:21.644315Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:290:2301];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=fbfdcaaa-5bf211f0-b0444e8f-c5025aa8;fline=abstract.cpp:13;event=new_stage;stage=Written;task_id=fbfdcaaa-5bf211f0-b0444e8f-c5025aa8; 2025-07-08T11:59:21.644596Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=129593352;raw_bytes=131330514;count=21;records=1575000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=22114848;raw_bytes=22108350;count=3;records=225000} inactive {blob_bytes=370457464;raw_bytes=372291258;count=55;records=4050002} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-07-08T11:59:21.655851Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=fbfdcaaa-5bf211f0-b0444e8f-c5025aa8;fline=abstract.cpp:13;event=new_stage;stage=Finished;task_id=fbfdcaaa-5bf211f0-b0444e8f-c5025aa8; 2025-07-08T11:59:21.655871Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=fbfdcaaa-5bf211f0-b0444e8f-c5025aa8;fline=abstract.cpp:54;event=WriteIndexComplete;type=CS::CLEANUP::PORTIONS;success=1; 2025-07-08T11:59:21.656038Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=fbfdcaaa-5bf211f0-b0444e8f-c5025aa8;fline=manager.cpp:15;event=unlock;process_id=CS::CLEANUP::PORTIONS::PORTIONS_DROP::fbfdcaaa-5bf211f0-b0444e8f-c5025aa8; 2025-07-08T11:59:21.656052Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=fbfdcaaa-5bf211f0-b0444e8f-c5025aa8;tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:21.656065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;task_id=fbfdcaaa-5bf211f0-b0444e8f-c5025aa8;tablet_id=9437184;fline=columnshard_impl.cpp:481;event=skip_compaction;reason=disabled; 2025-07-08T11:59:21.656074Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=fbfdcaaa-5bf211f0-b0444e8f-c5025aa8;tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=0; 2025-07-08T11:59:21.656087Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=fbfdcaaa-5bf211f0-b0444e8f-c5025aa8;tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:21.656097Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=fbfdcaaa-5bf211f0-b0444e8f-c5025aa8;tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:21.656102Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=fbfdcaaa-5bf211f0-b0444e8f-c5025aa8;tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:21.656119Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=fbfdcaaa-5bf211f0-b0444e8f-c5025aa8;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.499000s; 2025-07-08T11:59:21.656127Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=fbfdcaaa-5bf211f0-b0444e8f-c5025aa8;tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; 
2025-07-08T11:59:21.656176Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:8:4:0:6043488:0] 2025-07-08T11:59:21.656184Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:35:4:0:6171112:0] 2025-07-08T11:59:21.656188Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:55:3:0:6043488:0] 2025-07-08T11:59:21.656192Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:39:2:0:6043488:0] 2025-07-08T11:59:21.656197Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:50:4:0:6043488:0] 2025-07-08T11:59:21.656201Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:27:2:0:6171112:0] 2025-07-08T11:59:21.656206Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:18:2:0:6043488:0] 2025-07-08T11:59:21.656211Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:36:2:0:6043488:0] 2025-07-08T11:59:21.656216Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:45:2:0:6043488:0] 2025-07-08T11:59:21.656220Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:44:4:0:6043488:0] 2025-07-08T11:59:21.656225Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:23:4:0:6043488:0] 2025-07-08T11:59:21.656230Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:11:4:0:6043488:0] 2025-07-08T11:59:21.656234Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:51:2:0:6171112:0] 2025-07-08T11:59:21.656239Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:34:3:0:6171112:0] 2025-07-08T11:59:21.656243Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:41:4:0:6171112:0] 2025-07-08T11:59:21.656248Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:53:4:0:6171112:0] 2025-07-08T11:59:21.656253Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:5:4:0:6043488:0] 2025-07-08T11:59:21.656257Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:54:2:0:6043488:0] 2025-07-08T11:59:21.656262Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:7:3:0:6043488:0] 2025-07-08T11:59:21.656266Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:20:4:0:6043488:0] 2025-07-08T11:59:21.656271Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:2:4:0:6171112:0] 2025-07-08T11:59:21.656275Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:46:3:0:6171112:0] 2025-07-08T11:59:21.656280Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:26:4:0:6043488:0] 2025-07-08T11:59:21.656284Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:24:2:0:6171112:0] 2025-07-08T11:59:21.656288Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:1:3:0:6171112:0] 2025-07-08T11:59:21.656292Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:43:3:0:6171112:0] 2025-07-08T11:59:21.656296Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at 
tablet 9437184 Delete Blob DS:0:[9437184:3:22:3:0:6171112:0] 2025-07-08T11:59:21.656301Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:30:2:0:6043488:0] 2025-07-08T11:59:21.656309Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:40:3:0:6043488:0] 2025-07-08T11:59:21.656314Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:16:3:0:6043488:0] 2025-07-08T11:59:21.656321Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:31:3:0:6043488:0] 2025-07-08T11:59:21.656326Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:49:3:0:6043488:0] 2025-07-08T11:59:21.656330Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:48:2:0:6171112:0] 2025-07-08T11:59:21.656334Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:33:2:0:6043488:0] 2025-07-08T11:59:21.656339Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:29:4:0:6171112:0] 2025-07-08T11:59:21.656343Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:2:4:0:6171112:0] 2025-07-08T11:59:21.656348Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:13:3:0:6043488:0] 2025-07-08T11:59:21.656352Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:47:4:0:6043488:0] 2025-07-08T11:59:21.656357Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:38:4:0:6171112:0] 2025-07-08T11:59:21.656361Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:28:3:0:6043488:0] 2025-07-08T11:59:21.656366Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:32:4:0:1792:0] 2025-07-08T11:59:21.656371Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:6:2:0:6171112:0] 2025-07-08T11:59:21.656375Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:15:2:0:6043488:0] 2025-07-08T11:59:21.656379Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:4:3:0:6171112:0] 2025-07-08T11:59:21.656386Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:17:4:0:6171112:0] 2025-07-08T11:59:21.656391Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:21:2:0:6043488:0] 2025-07-08T11:59:21.656396Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:37:3:0:6043488:0] 2025-07-08T11:59:21.656400Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:14:4:0:6171112:0] 2025-07-08T11:59:21.656404Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:10:3:0:6043488:0] 2025-07-08T11:59:21.656408Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:12:2:0:6171112:0] 2025-07-08T11:59:21.656413Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:19:3:0:6171112:0] 2025-07-08T11:59:21.656417Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:52:3:0:6043488:0] 2025-07-08T11:59:21.656421Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:25:3:0:6043488:0] 
2025-07-08T11:59:21.656426Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:42:2:0:6043488:0] 2025-07-08T11:59:21.656433Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:9:2:0:6171112:0] GC for channel 4 deletes blobs: WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 Compactions happened: 35 Cleanups happened: 1 Old portions: 1 2 4 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 58 Cleaned up portions: 1 2 4 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 58 |65.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/test-results/unittest/{meta.json ... results_accumulator.log} |65.7%| [LD] {RESULT} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |65.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/address_classification/ut/unittest >> TxUsage::Sinks_Oltp_WriteToTopics_2 [GOOD] |65.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |65.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |65.7%| [LD] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 [GOOD] Test command err: 2025-07-08T11:58:58.640618Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:58:58.644767Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:58:58.644829Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:58:58.645625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:58:58.645703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:58.645734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:58.645752Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:58.645764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:58.645777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:58:58.645791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:58.645802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:58.645813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:58:58.645825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:58.645837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:58:58.645849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:58:58.651652Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:58:58.651847Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:58:58.651860Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:58:58.651901Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:58.651948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:58:58.651962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:58:58.651970Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:58:58.651980Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 
2025-07-08T11:58:58.651989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:58:58.651996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:58:58.652000Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:58:58.652018Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:58.652026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:58:58.652034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:58:58.652038Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:58:58.652048Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:58:58.652056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:58:58.652063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:58:58.652067Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:58:58.652076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:58:58.652083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:58:58.652088Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:58:58.652114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:58:58.652121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:58:58.652125Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:58:58.652147Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:58:58.652156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:58:58.652160Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:58:58.652174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:58:58.652181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:58:58.652186Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:58:58.652194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:58:58.652202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:58:58.652209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:58:58.652214Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:58:58.652257Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=13; 2025-07-08T11:58:58.652268Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=7; 2025-07-08T11:58:58.652277Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=5; 2025-07-08T11:58:58.652289Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=8; 2025-07-08T11:58:58.652301Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:58:58.652313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:58:58.652321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:58:58.652326Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:58:58.652340Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:58:58.652346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;ev ... 2;count=215;size_of_portion=184; 2025-07-08T11:59:42.723838Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:48;memory_size=110;data_size=70;sum=22832;count=431; 2025-07-08T11:59:42.723844Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:65;memory_size=206;data_size=182;sum=43568;count=432;size_of_meta=112; 2025-07-08T11:59:42.723849Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=254;sum=59120;count=216;size_of_portion=184; 2025-07-08T11:59:42.723869Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=1728; 2025-07-08T11:59:42.723876Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=1; 2025-07-08T11:59:42.723982Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=100; 2025-07-08T11:59:42.723990Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=1870; 2025-07-08T11:59:42.723995Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=1884; 2025-07-08T11:59:42.724002Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=1; 2025-07-08T11:59:42.724029Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=20; 2025-07-08T11:59:42.724034Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=1982; 2025-07-08T11:59:42.724057Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=17; 2025-07-08T11:59:42.724076Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=13; 2025-07-08T11:59:42.724102Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=21; 2025-07-08T11:59:42.724121Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=13; 2025-07-08T11:59:42.725191Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1058; 2025-07-08T11:59:42.726369Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1155; 2025-07-08T11:59:42.726388Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=1; 2025-07-08T11:59:42.726395Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=2; 2025-07-08T11:59:42.726402Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-07-08T11:59:42.726417Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=10; 2025-07-08T11:59:42.726425Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=0; 2025-07-08T11:59:42.726442Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=11; 2025-07-08T11:59:42.726448Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=0; 2025-07-08T11:59:42.726463Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=6; 2025-07-08T11:59:42.726476Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=8; 2025-07-08T11:59:42.726494Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=12; 2025-07-08T11:59:42.726499Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=5633; 2025-07-08T11:59:42.726548Z node 1 
:TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=108238352;raw_bytes=183045560;count=15;records=1915000} inactive {blob_bytes=205426288;raw_bytes=316809958;count=39;records=3635000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-07-08T11:59:42.726581Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4070:6045];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SwitchToWork; 2025-07-08T11:59:42.726591Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4070:6045];process=SwitchToWork;fline=columnshard.cpp:76;event=initialize_shard;step=SignalTabletActive; 2025-07-08T11:59:42.726608Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];process=SwitchToWork;fline=columnshard_impl.cpp:1327;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-07-08T11:59:42.726616Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];process=SwitchToWork;fline=column_engine_logs.cpp:471;event=OnTieringModified;new_count_tierings=0; 2025-07-08T11:59:42.726648Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:42.726668Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=23; 2025-07-08T11:59:42.726684Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975642771;tx_id=18446744073709551615;;current_snapshot_ts=1751975939699; 2025-07-08T11:59:42.726695Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=23;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:42.726706Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:42.726711Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:42.726735Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; 2025-07-08T11:59:42.735879Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:248;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-07-08T11:59:42.736021Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:237;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-07-08T11:59:42.736029Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-07-08T11:59:42.736033Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-07-08T11:59:42.736042Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:42.736081Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=23; 2025-07-08T11:59:42.736096Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975642771;tx_id=18446744073709551615;;current_snapshot_ts=1751975939699; 2025-07-08T11:59:42.736107Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=23;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:42.736121Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:42.736147Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:42.736172Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.999000s; 2025-07-08T11:59:42.736181Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 [GOOD] Test command err: 2025-07-08T11:59:01.477068Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:59:01.479644Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:59:01.479685Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:59:01.480301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:59:01.480350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:59:01.480380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 
2025-07-08T11:59:01.480399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:59:01.480414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:59:01.480434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:59:01.480456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:59:01.480476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:59:01.480494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:59:01.480512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:59:01.480530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:59:01.480545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:59:01.485817Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:59:01.486090Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:59:01.486107Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:59:01.486143Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:01.486180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:59:01.486193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:59:01.486199Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:59:01.486211Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:59:01.486219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:59:01.486226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:59:01.486231Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:59:01.486247Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:01.486255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:59:01.486262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:59:01.486266Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:59:01.486276Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:59:01.486283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:59:01.486289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:59:01.486294Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:59:01.486302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:59:01.486309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:59:01.486314Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:59:01.486337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:59:01.486343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:59:01.486348Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:59:01.486369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:59:01.486377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:59:01.486381Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:59:01.486394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:59:01.486401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:59:01.486405Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:59:01.486412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:59:01.486420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:59:01.486426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:59:01.486431Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:59:01.486464Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=8; 2025-07-08T11:59:01.486475Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=6; 2025-07-08T11:59:01.486483Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2025-07-08T11:59:01.486494Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=7; 2025-07-08T11:59:01.486503Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:59:01.486514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:59:01.486521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:59:01.486526Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:59:01.486540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:59:01.486546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;eve ... 706;count=215;size_of_portion=184; 2025-07-08T11:59:43.007508Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:48;memory_size=110;data_size=86;sum=23696;count=431; 2025-07-08T11:59:43.007515Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:65;memory_size=206;data_size=198;sum=44432;count=432;size_of_meta=112; 2025-07-08T11:59:43.007520Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=270;sum=59984;count=216;size_of_portion=184; 2025-07-08T11:59:43.007536Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=1844; 2025-07-08T11:59:43.007543Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=1; 2025-07-08T11:59:43.007638Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=89; 2025-07-08T11:59:43.007642Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=1973; 2025-07-08T11:59:43.007647Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=1987; 2025-07-08T11:59:43.007654Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=1; 2025-07-08T11:59:43.007678Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=20; 2025-07-08T11:59:43.007683Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=2077; 2025-07-08T11:59:43.007704Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=15; 2025-07-08T11:59:43.007721Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=11; 2025-07-08T11:59:43.007761Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=35; 2025-07-08T11:59:43.007779Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=13; 2025-07-08T11:59:43.008876Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1090; 2025-07-08T11:59:43.014361Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=5468; 2025-07-08T11:59:43.014396Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=2; 2025-07-08T11:59:43.014404Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=3; 2025-07-08T11:59:43.014411Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=2; 2025-07-08T11:59:43.014425Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=8; 2025-07-08T11:59:43.014431Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-07-08T11:59:43.014447Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=10; 2025-07-08T11:59:43.014453Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-07-08T11:59:43.014465Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=6; 2025-07-08T11:59:43.014479Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=8; 2025-07-08T11:59:43.014496Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=11; 2025-07-08T11:59:43.014502Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=9935; 2025-07-08T11:59:43.014552Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=108275536;raw_bytes=198365560;count=15;records=1915000} inactive {blob_bytes=205496480;raw_bytes=345889958;count=39;records=3635000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-07-08T11:59:43.014585Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4093:6068];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SwitchToWork; 2025-07-08T11:59:43.014594Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4093:6068];process=SwitchToWork;fline=columnshard.cpp:76;event=initialize_shard;step=SignalTabletActive; 2025-07-08T11:59:43.014611Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];process=SwitchToWork;fline=columnshard_impl.cpp:1327;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-07-08T11:59:43.014620Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];process=SwitchToWork;fline=column_engine_logs.cpp:471;event=OnTieringModified;new_count_tierings=0; 2025-07-08T11:59:43.014650Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:43.014668Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=23; 2025-07-08T11:59:43.014681Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975645618;tx_id=18446744073709551615;;current_snapshot_ts=1751975942537; 2025-07-08T11:59:43.014688Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=23;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:43.014698Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:43.014703Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:43.014725Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; 2025-07-08T11:59:43.021554Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:248;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-07-08T11:59:43.021711Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:237;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-07-08T11:59:43.021719Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-07-08T11:59:43.021723Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-07-08T11:59:43.021731Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:43.021759Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=23; 2025-07-08T11:59:43.021776Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975645618;tx_id=18446744073709551615;;current_snapshot_ts=1751975942537; 2025-07-08T11:59:43.021784Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=23;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:43.021795Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:43.021800Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:43.021822Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.999000s; 2025-07-08T11:59:43.021831Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; >> TxUsage::Sinks_Oltp_WriteToTopics_3 >> KqpScanSpilling::SpillingPragmaParseError [GOOD] |65.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/address_classification/ut/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 [GOOD] >> UpsertLoad::ShouldWriteKqpUpsert2 |65.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |65.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromRemoteSource [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SpillingPragmaParseError [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/43nv/001c54/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk5 Trying to start YDB, gRPC: 23745, MsgBus: 18234 2025-07-08T11:59:42.874362Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679549483401147:2080];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:59:42.874518Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001c54/r3tmp/tmpeumY9M/pdisk_1.dat 2025-07-08T11:59:43.094943Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23745, node 1 2025-07-08T11:59:43.137117Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:59:43.137131Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:59:43.137133Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:59:43.137185Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T11:59:43.189170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:43.189206Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:43.193324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18234 TClient is connected to server localhost:18234 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T11:59:43.332151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:59:43.335296Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:59:43.347872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T11:59:43.417238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:59:43.497953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:59:43.527157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:59:43.868599Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:59:43.870399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:59:43.888350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:59:43.901243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T11:59:43.914633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T11:59:43.929350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T11:59:43.950848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T11:59:43.970128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T11:59:44.246666Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7524679558073338216:2448], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:3:40: Error: Bad "EnableSpillingNodes" setting for "$all" cluster: (yexception) tools/enum_parser/enum_serialization_runtime/enum_runtime.cpp:70: Key 'GraceJoin1' not found in enum NYql::NDq::EEnabledSpillingNodes. Valid options are: 'None', 'GraceJoin', 'Aggregation', 'All'. 2025-07-08T11:59:44.247114Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmY5ZGY2Ni1hMTUxYzIyYS04NGY2MjBlYy1iYTUyOGY4MQ==, ActorId: [1:7524679558073338214:2447], ActorState: ExecuteState, TraceId: 01jzmyfz3h9janwe7vk48wt1bh, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation-UseSink [GOOD] >> DataShardVolatile::DistributedWriteEarlierSnapshotNotBlocked ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 [GOOD] Test command err: 2025-07-08T11:58:58.593440Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:58:58.597767Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:58:58.597830Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:58:58.598539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:58:58.598596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:58.598632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:58.598653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:58.598669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:58.598689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:58:58.598705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:58.598721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:58.598737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:58:58.598753Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:58.598770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:58:58.598787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:58:58.605073Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:58:58.605282Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:58:58.605296Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:58:58.605338Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:58.605389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:58:58.605403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:58:58.605409Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:58:58.605419Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:58:58.605428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:58:58.605435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:58:58.605440Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:58:58.605459Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:58.605466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:58:58.605474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:58:58.605478Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:58:58.605489Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:58:58.605496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:58:58.605503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:58:58.605508Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:58:58.605517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:58:58.605525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:58:58.605532Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:58:58.605557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:58:58.605564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:58:58.605569Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:58:58.605589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:58:58.605597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:58:58.605602Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:58:58.605616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:58:58.605623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:58:58.605627Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:58:58.605635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:58:58.605644Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:58:58.605650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:58:58.605655Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:58:58.605697Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=11; 2025-07-08T11:58:58.605712Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=11; 2025-07-08T11:58:58.605721Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=5; 2025-07-08T11:58:58.605733Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=8; 2025-07-08T11:58:58.605743Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:58:58.605754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:58:58.605763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:58:58.605768Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:58:58.605782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:58:58.605788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;e ... 
9706;count=215;size_of_portion=184; 2025-07-08T11:59:44.252050Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:48;memory_size=110;data_size=86;sum=23696;count=431; 2025-07-08T11:59:44.252057Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:65;memory_size=206;data_size=198;sum=44432;count=432;size_of_meta=112; 2025-07-08T11:59:44.252063Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=270;sum=59984;count=216;size_of_portion=184; 2025-07-08T11:59:44.252083Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=1893; 2025-07-08T11:59:44.252091Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=1; 2025-07-08T11:59:44.252194Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=97; 2025-07-08T11:59:44.252202Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=2032; 2025-07-08T11:59:44.252207Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=2045; 2025-07-08T11:59:44.252214Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=1; 2025-07-08T11:59:44.252239Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=19; 2025-07-08T11:59:44.252245Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=2139; 2025-07-08T11:59:44.252269Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=17; 2025-07-08T11:59:44.252287Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=13; 2025-07-08T11:59:44.252313Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=20; 2025-07-08T11:59:44.252332Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=13; 2025-07-08T11:59:44.253533Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1190; 2025-07-08T11:59:44.254752Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1199; 2025-07-08T11:59:44.254765Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=2; 2025-07-08T11:59:44.254773Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=2; 2025-07-08T11:59:44.254779Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-07-08T11:59:44.254794Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=9; 2025-07-08T11:59:44.254800Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-07-08T11:59:44.254816Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=9; 2025-07-08T11:59:44.254822Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=0; 2025-07-08T11:59:44.254837Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=7; 2025-07-08T11:59:44.254850Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=8; 2025-07-08T11:59:44.254868Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=12; 2025-07-08T11:59:44.254874Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=5779; 2025-07-08T11:59:44.254917Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted 
{blob_bytes=108275528;raw_bytes=198365560;count=15;records=1915000} inactive {blob_bytes=205496480;raw_bytes=345889958;count=39;records=3635000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-07-08T11:59:44.254942Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4093:6068];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SwitchToWork; 2025-07-08T11:59:44.254951Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4093:6068];process=SwitchToWork;fline=columnshard.cpp:76;event=initialize_shard;step=SignalTabletActive; 2025-07-08T11:59:44.254965Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];process=SwitchToWork;fline=columnshard_impl.cpp:1327;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-07-08T11:59:44.254973Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];process=SwitchToWork;fline=column_engine_logs.cpp:471;event=OnTieringModified;new_count_tierings=0; 2025-07-08T11:59:44.255003Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:44.255022Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=23; 2025-07-08T11:59:44.255035Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975642734;tx_id=18446744073709551615;;current_snapshot_ts=1751975939653; 2025-07-08T11:59:44.255044Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=23;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:44.255054Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:44.255060Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:44.255082Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; 2025-07-08T11:59:44.257615Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:248;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-07-08T11:59:44.257794Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:237;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-07-08T11:59:44.257803Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-07-08T11:59:44.257808Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-07-08T11:59:44.257816Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:44.257843Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=23; 2025-07-08T11:59:44.257858Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975642734;tx_id=18446744073709551615;;current_snapshot_ts=1751975939653; 2025-07-08T11:59:44.257869Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=23;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:44.257879Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:44.257885Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:44.257908Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.999000s; 2025-07-08T11:59:44.257918Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; |65.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> TMiniKQLProtoTestYdb::TestExportOptionalTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportListTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportIntegralYdb >> TMiniKQLProtoTestYdb::TestExportIntegralYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalYdb |65.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |65.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |65.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 [GOOD] Test command err: 2025-07-08T11:59:16.799080Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:59:16.803109Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:59:16.803164Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 
2025-07-08T11:59:16.803756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:59:16.803816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:59:16.803853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:59:16.803877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:59:16.803894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:59:16.803914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:59:16.803932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:59:16.803948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:59:16.803965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:59:16.803983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:59:16.804002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:59:16.804021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:59:16.808826Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:59:16.808993Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:59:16.809004Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:59:16.809032Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:16.809070Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:59:16.809083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:59:16.809089Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:59:16.809098Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:59:16.809105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:59:16.809112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:59:16.809116Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:59:16.809136Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:16.809143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:59:16.809151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:59:16.809155Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:59:16.809163Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:59:16.809170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:59:16.809177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:59:16.809181Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:59:16.809189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:59:16.809196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:59:16.809200Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2025-07-08T11:59:16.809221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:59:16.809228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:59:16.809232Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:59:16.809252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:59:16.809259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:59:16.809263Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:59:16.809277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:59:16.809283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:59:16.809287Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:59:16.809294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:59:16.809302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:59:16.809308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:59:16.809312Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:59:16.809349Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=8; 2025-07-08T11:59:16.809362Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=10; 2025-07-08T11:59:16.809370Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2025-07-08T11:59:16.809381Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=7; 2025-07-08T11:59:16.809390Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:59:16.809401Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:59:16.809407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:59:16.809412Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:59:16.809424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:59:16.809430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;ev ... n_id:10;chunk_idx:3;blob_range:[NO_BLOB:0:8384];;column_id:10;chunk_idx:4;blob_range:[NO_BLOB:0:8384];;column_id:10;chunk_idx:5;blob_range:[NO_BLOB:0:8384];;column_id:10;chunk_idx:6;blob_range:[NO_BLOB:0:8384];;column_id:10;chunk_idx:7;blob_range:[NO_BLOB:0:8384];;column_id:10;chunk_idx:8;blob_range:[NO_BLOB:0:8384];;column_id:10;chunk_idx:9;blob_range:[NO_BLOB:0:8656];;column_id:10;chunk_idx:10;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:11;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:12;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:13;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:14;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:15;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:16;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:17;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:18;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:19;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:20;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:21;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:22;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:23;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:24;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:25;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:26;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:27;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:28;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:29;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:30;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:31;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:32;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:33;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:34;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:35;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:36;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:37;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:38;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:39;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:40;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:41;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:42;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:43;blob_range:[NO_BLOB:0:9408];;column_id:10;chunk_idx:44;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:45;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:46;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:47;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:48;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:49;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:50;blob_range:[NO_BLOB:0:9424];;column_i
d:10;chunk_idx:51;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:52;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:53;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:54;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:55;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:56;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:57;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:58;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:59;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:60;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:61;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:62;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:63;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:64;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:65;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:66;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:67;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:68;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:69;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:70;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:71;blob_range:[NO_BLOB:0:9424];;column_id:10;chunk_idx:72;blob_range:[NO_BLOB:0:9424];;;;switched=(portion_id:220;path_id:9438184000001;records_count:75000;schema_version:1;level:1;;column_size:7200040;index_size:0;meta:(()););(portion_id:218;path_id:9438184000001;records_count:75000;schema_version:1;level:2;;column_size:7198464;index_size:0;meta:(()););(portion_id:221;path_id:9438184000001;records_count:75000;schema_version:1;level:1;;column_size:7200040;index_size:0;meta:(()););; 2025-07-08T11:59:43.857404Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;event=on_execution;consumer=GENERAL_COMPACTION;task_id=803da88-5bf311f0-8e7716e1-ebc39f2d;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=803da88-5bf311f0-8e7716e1-ebc39f2d;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=9437184;parent_id=[1:5742:7736];task_id=803da88-5bf311f0-8e7716e1-ebc39f2d;task_class=CS::GENERAL;fline=general_compaction.cpp:140;event=blobs_created;appended=1;switched=3; 2025-07-08T11:59:43.857419Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;event=on_execution;consumer=GENERAL_COMPACTION;task_id=803da88-5bf311f0-8e7716e1-ebc39f2d;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=803da88-5bf311f0-8e7716e1-ebc39f2d;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=9437184;parent_id=[1:5742:7736];task_id=803da88-5bf311f0-8e7716e1-ebc39f2d;task_class=CS::GENERAL;fline=abstract.cpp:13;event=new_stage;stage=Constructed;task_id=803da88-5bf311f0-8e7716e1-ebc39f2d; 2025-07-08T11:59:43.858096Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5742:7736];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:52;event=TEvWriteIndex;count=1; 2025-07-08T11:59:43.859100Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5742:7736];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:59;event=TTxWriteDraft; 2025-07-08T11:59:43.859108Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5742:7736];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=WriteDraft;task_id=803da88-5bf311f0-8e7716e1-ebc39f2d; 2025-07-08T11:59:43.956799Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: 
fline=tx_draft.cpp:16;event=draft_completed; 2025-07-08T11:59:43.956846Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=7198464;count=779;actions=__MEMORY,__DEFAULT,;waiting=2;; 2025-07-08T11:59:44.254296Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-07-08T11:59:44.254355Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5742:7736];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=common_level.h:121;from=0,0,0,0,;to=74999,74999,74999,74999,; 2025-07-08T11:59:44.254370Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5742:7736];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=common_level.h:141;itFrom=1;itTo=1;raw=7069450;count=1;packed=7200040; 2025-07-08T11:59:44.254390Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5742:7736];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:48;memory_size=86;data_size=60;sum=89010;count=1749; 2025-07-08T11:59:44.254398Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5742:7736];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:65;memory_size=182;data_size=172;sum=173010;count=1750;size_of_meta=112; 2025-07-08T11:59:44.254409Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5742:7736];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_portion.cpp:40;memory_size=254;data_size=244;sum=236010;count=875;size_of_portion=184; 2025-07-08T11:59:44.254502Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5742:7736];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Compiled;task_id=803da88-5bf311f0-8e7716e1-ebc39f2d; 2025-07-08T11:59:44.254560Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[2] (CS::GENERAL) apply at tablet 9437184 2025-07-08T11:59:44.327970Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5742:7736];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=803da88-5bf311f0-8e7716e1-ebc39f2d;fline=abstract.cpp:13;event=new_stage;stage=Written;task_id=803da88-5bf311f0-8e7716e1-ebc39f2d; 2025-07-08T11:59:44.328380Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 633 2025-07-08T11:59:44.329308Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=36024048;raw_bytes=35366250;count=5;records=375200} inactive {blob_bytes=105325696;raw_bytes=102327000;count=216;records=1200200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-07-08T11:59:44.451996Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=803da88-5bf311f0-8e7716e1-ebc39f2d;fline=abstract.cpp:13;event=new_stage;stage=Finished;task_id=803da88-5bf311f0-8e7716e1-ebc39f2d; 2025-07-08T11:59:44.452024Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=803da88-5bf311f0-8e7716e1-ebc39f2d;fline=abstract.cpp:54;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-07-08T11:59:44.452037Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=803da88-5bf311f0-8e7716e1-ebc39f2d;fline=with_appended.cpp:65;portions=222,;task_id=803da88-5bf311f0-8e7716e1-ebc39f2d; 2025-07-08T11:59:44.452178Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;task_id=803da88-5bf311f0-8e7716e1-ebc39f2d;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::803da88-5bf311f0-8e7716e1-ebc39f2d; 2025-07-08T11:59:44.452198Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=803da88-5bf311f0-8e7716e1-ebc39f2d;fline=granule.cpp:97;event=OnCompactionFinished;info=(granule:9438184000001;path_id:9438184000001;size:21623968;portions_count:222;); 2025-07-08T11:59:44.452207Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=803da88-5bf311f0-8e7716e1-ebc39f2d;tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:44.452230Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=803da88-5bf311f0-8e7716e1-ebc39f2d;tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=9; 2025-07-08T11:59:44.452245Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=803da88-5bf311f0-8e7716e1-ebc39f2d;tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975660231;tx_id=18446744073709551615;;current_snapshot_ts=1751975958405; 2025-07-08T11:59:44.452254Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=803da88-5bf311f0-8e7716e1-ebc39f2d;tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:44.452264Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=803da88-5bf311f0-8e7716e1-ebc39f2d;tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:44.452270Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=803da88-5bf311f0-8e7716e1-ebc39f2d;tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:44.452291Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=803da88-5bf311f0-8e7716e1-ebc39f2d;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.894000s; 2025-07-08T11:59:44.452301Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=803da88-5bf311f0-8e7716e1-ebc39f2d;tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; 2025-07-08T11:59:44.452340Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 633 >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalNotEmptyYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportListYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantNotNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNullYdb |65.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |65.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromRemoteSource [GOOD] Test command err: 2025-07-08T11:59:42.720795Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679547064745066:2232];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:59:42.770674Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0010cd/r3tmp/tmpH0f3lY/pdisk_1.dat 2025-07-08T11:59:42.799382Z node 1 :HTTP ERROR: (#26,[::1]:11931) connection closed with error: Connection refused 2025-07-08T11:59:42.805090Z node 1 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-07-08T11:59:42.805567Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:42.825392Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:59:42.825406Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:59:42.825407Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:59:42.825448Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T11:59:42.873415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:42.873440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:42.877326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:59:43.715373Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNotNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType [GOOD] >> TxUsage::WriteToTopic_Demo_17 [GOOD] >> TMiniKQLProtoTestYdb::TestExportVoidTypeYdb >> UpsertLoad::ShouldWriteKqpUpsert >> TMiniKQLProtoTestYdb::TestExportVoidTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportUuidTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportTupleTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportStructTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVariantTupleTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVariantStructTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVoidYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportStringYdb >> TMiniKQLProgramBuilderTest::TestEraseRowStaticKey [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowPartialDynamicKey >> TMiniKQLProtoTestYdb::TestExportStringYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportUuidYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportTupleYdb >> TxUsage::WriteToTopic_Demo_18_RestartNo >> TMiniKQLProtoTestYdb::TestExportTupleYdb [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowPartialDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectRow [GOOD] >> TMiniKQLProgramBuilderTest::TestUpdateRowDynamicKey >> UpsertLoad::ShouldWriteKqpUpsert2 [GOOD] >> TMiniKQLProgramBuilderTest::TestUpdateRowDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectFromInclusiveRange [GOOD] >> TMiniKQLProtoTestYdb::TestExportStructYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVariantYdb [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectFromExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectBothFromInclusiveToInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectBothFromExclusiveToExclusiveRange [GOOD] >> 
TMiniKQLProgramBuilderTest::TestInvalidParameterName [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterType |65.8%| [TA] $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TMiniKQLProgramBuilderTest::TestInvalidParameterType [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExists >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExistsNullKey [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItems [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItemsFromNull >> TMiniKQLEngineFlatTest::TestPureProgram >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItemsFromNull [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByBytes [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNullNull [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeToExclusive |65.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeToExclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNoShards >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistWithoutColumns [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayloadNullValue [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistErasePayload >> TMiniKQLEngineFlatTest::TestPureProgram [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFromInclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFromExclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromIncTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromIncTo >> TMiniKQLEngineFlatTest::TestSelectRangeNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitions [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems1 >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromIncTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromExcTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromExcTo ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsert2 [GOOD] Test command err: 2025-07-08T11:59:46.030728Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000b44/r3tmp/tmpLiGUXh/pdisk_1.dat 2025-07-08T11:59:46.178516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T11:59:46.197255Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:46.233532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:46.233576Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:46.245383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:59:46.330915Z 
node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:59:46.609889Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "JustTable" } UpsertKqpStart { RowCount: 20 Inflight: 5 } 2025-07-08T11:59:46.609932Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:675:2569], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 2025-07-08T11:59:46.610383Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:675:2569], subTag: 2} started# 5 actors each with inflight# 4 2025-07-08T11:59:46.610390Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 2025-07-08T11:59:46.610398Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 2025-07-08T11:59:46.610403Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 2025-07-08T11:59:46.610408Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 2025-07-08T11:59:46.610412Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 2025-07-08T11:59:46.611074Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 1} session: ydb://session/3?node_id=1&id=ODdkN2Y1NjgtNmRiZjZmMTktZmZjYTQ5N2UtOGNiOTdkMg== 2025-07-08T11:59:46.611303Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 2} session: ydb://session/3?node_id=1&id=ZDk3ZjJkYWUtYTEzM2ZkZGQtNzMyNWVmYjctNWVjMGIxOTU= 2025-07-08T11:59:46.611487Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 3} session: ydb://session/3?node_id=1&id=OTEyYTU5NWYtMWJhMTFhNDctMzU5YzdmYjAtZGZjMjM2YTU= 2025-07-08T11:59:46.611701Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 4} session: ydb://session/3?node_id=1&id=Nzc0Mzc4OTMtMjMzM2I2My0yNTIwMDU2Yi1mNTMzNDBkNw== 2025-07-08T11:59:46.611908Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 5} session: ydb://session/3?node_id=1&id=MTQyM2QzMjEtODgzNDg1MzYtNjczNTIwZDktNTAxMDdjZDY= 2025-07-08T11:59:46.866003Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 1} finished in 1751975986.865982s, errors=0 2025-07-08T11:59:46.866041Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 2} finished in 1751975986.866039s, errors=0 2025-07-08T11:59:46.866190Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:675:2569], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1751975986865 OperationsOK: 4 OperationsError: 0 } 2025-07-08T11:59:46.866201Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 3} finished in 1751975986.866198s, errors=0 2025-07-08T11:59:46.866227Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:675:2569], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1751975986866 OperationsOK: 4 OperationsError: 0 } 2025-07-08T11:59:46.866232Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 
0, parent: [1:676:2570], subTag: 4} finished in 1751975986.866231s, errors=0 2025-07-08T11:59:46.866241Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:675:2569], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1751975986866 OperationsOK: 4 OperationsError: 0 } 2025-07-08T11:59:46.866256Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:675:2569], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1751975986866 OperationsOK: 4 OperationsError: 0 } 2025-07-08T11:59:46.866262Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 5} finished in 1751975986.866260s, errors=0 2025-07-08T11:59:46.866273Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:675:2569], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1751975986866 OperationsOK: 4 OperationsError: 0 } 2025-07-08T11:59:46.866279Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:675:2569], subTag: 2} finished in 0.255924s, oks# 20, errors# 0 2025-07-08T11:59:46.866295Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:676:2570] with tag# 2 >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistErasePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowExistChangePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowExistErasePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortPushdownPk [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortPushdown [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowDynamicKey >> TMiniKQLProgramBuilderTest::TestUpdateRowStaticKey [GOOD] >> TMiniKQLProtoTestYdb::TestExportDataTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDictTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportBoolYdb >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromExcTo [GOOD] >> TMiniKQLEngineFlatTest::TestMapsPushdown >> TMiniKQLProtoTestYdb::TestExportBoolYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDoubleYdb >> TMiniKQLEngineFlatHostTest::ShardId [GOOD] >> TMiniKQLEngineFlatHostTest::Basic [GOOD] >> TMiniKQLEngineFlatTest::TestAbort [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail1 [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail2 >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems1 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems2 >> LocalPartition::DescribeHang [GOOD] >> LocalPartition::DiscoveryHang >> TMiniKQLProtoTestYdb::TestExportDoubleYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalYdb >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot [GOOD] >> TMiniKQLEngineFlatTest::TestEmptyProgram [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRow |65.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestInvalidParameterType [GOOD] |65.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestExportVariantYdb [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot [GOOD] >> DataShardVolatile::DistributedWriteEarlierSnapshotNotBlocked [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit+UseSink >> TMiniKQLProtoTestYdb::TestExportDecimalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalNegativeYdb >> TMiniKQLEngineFlatTest::TestCASBoth2Fail2 [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail12 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems2 
[GOOD] >> TMiniKQLEngineFlatTest::TestBug998 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems3 >> TMiniKQLEngineFlatTest::TestAcquireLocks [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownMultipleConsumers >> TMiniKQLEngineFlatTest::TestMapsPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoOrderedTakePushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoAggregatedPushdown >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems3 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalNegativeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalHugeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalOptionalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDictYdb [GOOD] >> TMiniKQLProtoTestYdb::TestCellsFromTuple [GOOD] >> UpsertLoad::ShouldWriteKqpUpsert [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownMultipleConsumers [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRow [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowNullKey >> TMiniKQLEngineFlatTest::NoMapPushdownNonPureLambda [GOOD] >> TMiniKQLEngineFlatTest::NoOrderedMapPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoAggregatedPushdown [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownWriteToTable [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure >> TMiniKQLEngineFlatTest::TestNoPartialSortPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestAcquireLocks [GOOD] >> TMiniKQLProgramBuilderTest::TestDiagnostics [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowNullKey [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Success [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestDiagnostics >> TMiniKQLEngineFlatTest::TestDiagnostics [GOOD] >> TMiniKQLEngineFlatTest::TestCombineByKeyPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestCombineByKeyNoPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestLengthPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestInternalResult >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsNotExists >> TMiniKQLEngineFlatTest::TestInternalResult [GOOD] >> TMiniKQLEngineFlatTest::TestIndependentSelects [GOOD] >> TMiniKQLEngineFlatTest::TestCrossTableRs [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime [GOOD] |65.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsNotExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowPayload [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowPayloadNullKey [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeToInclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowManyShards |65.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestDiagnostics [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitions |65.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest |65.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitions [GOOD] >> 
TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByItems [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByBytes [GOOD] >> TMiniKQLEngineFlatTest::TestSomePushDown [GOOD] >> TMiniKQLEngineFlatTest::TestTakePushdown [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown [GOOD] |65.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestCellsFromTuple [GOOD] |65.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination [GOOD] Test command err: PrepareShardPrograms (491): too many shard readsets (2 > 1), src tables: [200:301:0], dst tables: [200:301:0] Type { Kind: Struct } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp [GOOD] Test command err: 2025-07-08T11:58:59.835210Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:58:59.838131Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:58:59.838178Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:58:59.838977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:58:59.839037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:59.839084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:59.839108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:59.839126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:59.839147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:58:59.839166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:59.839182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:59.839200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-07-08T11:58:59.839219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:59.839247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:58:59.839266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:58:59.844926Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:58:59.845122Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:58:59.845132Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:58:59.845158Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:59.845197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:58:59.845210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:58:59.845215Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:58:59.845224Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:58:59.845232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:58:59.845238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:58:59.845242Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:58:59.845261Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:59.845268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:58:59.845275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:58:59.845279Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:58:59.845287Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:58:59.845294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:58:59.845301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:58:59.845305Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:58:59.845314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:58:59.845320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:58:59.845324Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:58:59.845346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:58:59.845353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:58:59.845357Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:58:59.845376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:58:59.845384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:58:59.845388Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:58:59.845400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:58:59.845407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:58:59.845411Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:58:59.845418Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:58:59.845426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:58:59.845432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:58:59.845436Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:58:59.845471Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=8; 2025-07-08T11:58:59.845481Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=6; 2025-07-08T11:58:59.845488Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2025-07-08T11:58:59.845499Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=7; 2025-07-08T11:58:59.845508Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:58:59.845519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:58:59.845526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:58:59.845531Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:58:59.845542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:58:59.845546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;eve ... 
06;count=215;size_of_portion=184; 2025-07-08T11:59:47.062382Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:48;memory_size=110;data_size=86;sum=23696;count=431; 2025-07-08T11:59:47.062388Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:65;memory_size=206;data_size=198;sum=44432;count=432;size_of_meta=112; 2025-07-08T11:59:47.062394Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=270;sum=59984;count=216;size_of_portion=184; 2025-07-08T11:59:47.062417Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=2026; 2025-07-08T11:59:47.062426Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=1; 2025-07-08T11:59:47.062542Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=109; 2025-07-08T11:59:47.062551Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=2184; 2025-07-08T11:59:47.062556Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=2199; 2025-07-08T11:59:47.062563Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=0; 2025-07-08T11:59:47.062587Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=18; 2025-07-08T11:59:47.062592Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=2298; 2025-07-08T11:59:47.062619Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=20; 2025-07-08T11:59:47.062638Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=13; 2025-07-08T11:59:47.062665Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=22; 2025-07-08T11:59:47.062683Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=13; 2025-07-08T11:59:47.063841Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1148; 2025-07-08T11:59:47.065258Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1394; 2025-07-08T11:59:47.065276Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=2; 2025-07-08T11:59:47.065283Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2025-07-08T11:59:47.065288Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-07-08T11:59:47.065302Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=9; 2025-07-08T11:59:47.065308Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-07-08T11:59:47.065324Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=11; 2025-07-08T11:59:47.065330Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-07-08T11:59:47.065347Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=7; 2025-07-08T11:59:47.065361Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=9; 2025-07-08T11:59:47.065378Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=12; 2025-07-08T11:59:47.065384Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=6220; 2025-07-08T11:59:47.065439Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted 
{blob_bytes=108275536;raw_bytes=198365560;count=15;records=1915000} inactive {blob_bytes=205496480;raw_bytes=345889958;count=39;records=3635000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-07-08T11:59:47.065472Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4093:6068];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SwitchToWork; 2025-07-08T11:59:47.065482Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4093:6068];process=SwitchToWork;fline=columnshard.cpp:76;event=initialize_shard;step=SignalTabletActive; 2025-07-08T11:59:47.065499Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];process=SwitchToWork;fline=columnshard_impl.cpp:1327;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-07-08T11:59:47.065506Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];process=SwitchToWork;fline=column_engine_logs.cpp:471;event=OnTieringModified;new_count_tierings=0; 2025-07-08T11:59:47.065541Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:47.065561Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=23; 2025-07-08T11:59:47.065577Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975643977;tx_id=18446744073709551615;;current_snapshot_ts=1751975940896; 2025-07-08T11:59:47.065585Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=23;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:47.065596Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:47.065601Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:47.065623Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; 2025-07-08T11:59:47.067445Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:248;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-07-08T11:59:47.067586Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:237;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-07-08T11:59:47.067593Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-07-08T11:59:47.067597Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-07-08T11:59:47.067603Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:47.067627Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=23; 2025-07-08T11:59:47.067639Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975643977;tx_id=18446744073709551615;;current_snapshot_ts=1751975940896; 2025-07-08T11:59:47.067648Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=23;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:47.067658Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:47.067663Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:47.067681Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.999000s; 2025-07-08T11:59:47.067691Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4093:6068];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsert [GOOD] Test command err: 2025-07-08T11:59:47.383242Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000b52/r3tmp/tmpMZwViH/pdisk_1.dat 2025-07-08T11:59:47.705958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T11:59:47.721435Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:47.757415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:47.757461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:47.769312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:59:47.858842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:59:48.126258Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } UpsertKqpStart { RowCount: 20 Inflight: 5 } 2025-07-08T11:59:48.126301Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:675:2569], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 2025-07-08T11:59:48.126724Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:675:2569], subTag: 2} started# 5 actors each with inflight# 4 2025-07-08T11:59:48.126737Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 2025-07-08T11:59:48.126746Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 2025-07-08T11:59:48.126751Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 2025-07-08T11:59:48.126755Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 2025-07-08T11:59:48.126760Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 2025-07-08T11:59:48.127591Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 1} session: ydb://session/3?node_id=1&id=OTQ3OTg2YTctYzNhYWM5MjctNTAzN2NkZmEtYTQ1NGFkNGI= 2025-07-08T11:59:48.127913Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 2} session: ydb://session/3?node_id=1&id=NDc5OWU3YjctMzVkOGE3MTItZTNjNDY1YmItYTdiNWFiMmY= 2025-07-08T11:59:48.128164Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 3} session: ydb://session/3?node_id=1&id=YTMwYjkyOWEtM2E4YTdhYi04NjI2NjIzZS00NWI2MTU2 2025-07-08T11:59:48.128369Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 4} session: ydb://session/3?node_id=1&id=MTdjYWFmYmEtMzBiMmJkMzEtODc0NGQxNzEtNGNkZmViZDk= 2025-07-08T11:59:48.128556Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 5} session: ydb://session/3?node_id=1&id=YjNjYmI4MDMtODcxMzVmM2ItNzFmODcxNDYtZTRkZGExY2M= 2025-07-08T11:59:48.318007Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 1} finished in 1751975988.317981s, errors=0 2025-07-08T11:59:48.318047Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 2} finished in 1751975988.318045s, errors=0 2025-07-08T11:59:48.318179Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:675:2569], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1751975988317 OperationsOK: 4 OperationsError: 0 } 2025-07-08T11:59:48.318189Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 3} finished in 1751975988.318187s, errors=0 2025-07-08T11:59:48.318217Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:675:2569], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1751975988318 OperationsOK: 4 OperationsError: 0 } 2025-07-08T11:59:48.318223Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 4} finished in 1751975988.318221s, errors=0 
2025-07-08T11:59:48.318233Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:675:2569], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1751975988318 OperationsOK: 4 OperationsError: 0 } 2025-07-08T11:59:48.318264Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:675:2569], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1751975988318 OperationsOK: 4 OperationsError: 0 } 2025-07-08T11:59:48.318273Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:676:2570], subTag: 5} finished in 1751975988.318271s, errors=0 2025-07-08T11:59:48.318292Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:675:2569], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1751975988318 OperationsOK: 4 OperationsError: 0 } 2025-07-08T11:59:48.318299Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:675:2569], subTag: 2} finished in 0.191607s, oks# 20, errors# 0 2025-07-08T11:59:48.318326Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:676:2570] with tag# 2 |65.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestCrossTableRs [GOOD] Test command err: SetProgram (370): ydb/core/engine/mkql_engine_flat.cpp:183: ExtractResultType(): requirement !label.StartsWith(TxInternalResultPrefix) failed. Label can't be used in SetResult as it's reserved for internal purposes: __cantuse PrepareShardPrograms (491): too many shard readsets (1 > 0), src tables: [200:301:0], dst tables: [200:302:0] Type { Kind: Struct } |65.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure [GOOD] |65.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest |65.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns [GOOD] |65.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot [GOOD] Test command err: 2025-07-08T11:59:07.975064Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:59:07.978857Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:59:07.978895Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:59:07.979389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:59:07.979436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:59:07.979463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:59:07.979478Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:59:07.979488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:59:07.979500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:59:07.979511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:59:07.979521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:59:07.979531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:59:07.979541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:59:07.979551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:59:07.979561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:59:07.983556Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:59:07.983758Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:59:07.983784Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:59:07.983811Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:07.983851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:59:07.983874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:59:07.983883Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:59:07.983892Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 
2025-07-08T11:59:07.983901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:59:07.983908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:59:07.983913Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:59:07.983945Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:07.983954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:59:07.983961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:59:07.983965Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:59:07.983974Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:59:07.983981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:59:07.983988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:59:07.983992Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:59:07.984001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:59:07.984008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:59:07.984013Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:59:07.984035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:59:07.984043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:59:07.984047Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:59:07.984066Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:59:07.984074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:59:07.984078Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:59:07.984091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:59:07.984098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:59:07.984102Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:59:07.984110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:59:07.984117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:59:07.984124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:59:07.984128Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:59:07.984173Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=7; 2025-07-08T11:59:07.984186Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=8; 2025-07-08T11:59:07.984194Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2025-07-08T11:59:07.984204Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=6; 2025-07-08T11:59:07.984214Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:59:07.984225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:59:07.984232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:59:07.984237Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:59:07.984251Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:59:07.984256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;eve ... ion=184; 2025-07-08T11:59:47.543203Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:48;memory_size=110;data_size=68;sum=787448;count=14327; 2025-07-08T11:59:47.543209Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:65;memory_size=206;data_size=180;sum=1475192;count=14328;size_of_meta=112; 2025-07-08T11:59:47.543215Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=252;sum=1991000;count=7164;size_of_portion=184; 2025-07-08T11:59:47.543287Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=30697; 2025-07-08T11:59:47.543300Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=2; 2025-07-08T11:59:47.543461Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=152; 2025-07-08T11:59:47.543472Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=30905; 2025-07-08T11:59:47.543477Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=30917; 2025-07-08T11:59:47.543485Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=1; 2025-07-08T11:59:47.543551Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=60; 2025-07-08T11:59:47.543557Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=31051; 2025-07-08T11:59:47.543598Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=32; 2025-07-08T11:59:47.543616Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=12; 2025-07-08T11:59:47.543679Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=55; 2025-07-08T11:59:47.543722Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=38; 2025-07-08T11:59:47.562322Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=18576; 2025-07-08T11:59:47.586415Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=24040; 2025-07-08T11:59:47.586458Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=4; 2025-07-08T11:59:47.586465Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2025-07-08T11:59:47.586472Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-07-08T11:59:47.586487Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=11; 2025-07-08T11:59:47.586494Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=0; 2025-07-08T11:59:47.586510Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=11; 2025-07-08T11:59:47.586517Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-07-08T11:59:47.586531Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=7; 2025-07-08T11:59:47.586549Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=12; 2025-07-08T11:59:47.586565Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=9; 2025-07-08T11:59:47.586571Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=74999; 2025-07-08T11:59:47.586615Z node 
1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=22538992;raw_bytes=22128150;count=3;records=225200} inactive {blob_bytes=147791880;raw_bytes=143975050;count=221;records=1575200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-07-08T11:59:47.586653Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:9968:11570];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SwitchToWork; 2025-07-08T11:59:47.586664Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:9968:11570];process=SwitchToWork;fline=columnshard.cpp:76;event=initialize_shard;step=SignalTabletActive; 2025-07-08T11:59:47.586681Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];process=SwitchToWork;fline=columnshard_impl.cpp:1327;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-07-08T11:59:47.586693Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];process=SwitchToWork;fline=column_engine_logs.cpp:471;event=OnTieringModified;new_count_tierings=0; 2025-07-08T11:59:47.586729Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:47.586747Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=9; 2025-07-08T11:59:47.586764Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975653529;tx_id=18446744073709551615;;current_snapshot_ts=1751975949804; 2025-07-08T11:59:47.586772Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:47.586782Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:47.586787Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:47.586810Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; 2025-07-08T11:59:47.588592Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:248;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-07-08T11:59:47.588923Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:237;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-07-08T11:59:47.588934Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-07-08T11:59:47.588938Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-07-08T11:59:47.588979Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:47.589002Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=9; 2025-07-08T11:59:47.589014Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975653529;tx_id=18446744073709551615;;current_snapshot_ts=1751975949804; 2025-07-08T11:59:47.589021Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:47.589031Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:47.589036Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:47.589056Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=1.000000s; 2025-07-08T11:59:47.589063Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9968:11570];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; |65.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown [GOOD] >> TCacheTest::MigrationDeletedPathNavigate [GOOD] >> TCacheTestWithDrops::LookupErrorUponEviction [GOOD] |65.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot [GOOD] Test command err: 2025-07-08T11:59:08.353048Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:59:08.357494Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:59:08.357547Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:59:08.358297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:59:08.358354Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:59:08.358390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:59:08.358415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:59:08.358433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:59:08.358454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:59:08.358475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:59:08.358494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:59:08.358512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:59:08.358530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:59:08.358548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:59:08.358567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:59:08.364753Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:59:08.364984Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:59:08.364995Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:59:08.365024Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:08.365070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:59:08.365082Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:59:08.365091Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:59:08.365101Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:59:08.365110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:59:08.365117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:59:08.365122Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:59:08.365144Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:08.365152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:59:08.365161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:59:08.365165Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:59:08.365176Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:59:08.365183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:59:08.365191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:59:08.365195Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:59:08.365205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:59:08.365213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:59:08.365218Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:59:08.365242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-07-08T11:59:08.365250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:59:08.365255Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:59:08.365276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:59:08.365285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:59:08.365290Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:59:08.365304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:59:08.365311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:59:08.365316Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:59:08.365325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:59:08.365333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:59:08.365340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:59:08.365344Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:59:08.365381Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=9; 2025-07-08T11:59:08.365392Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=6; 2025-07-08T11:59:08.365401Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2025-07-08T11:59:08.365412Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=7; 2025-07-08T11:59:08.365424Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:59:08.365436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:59:08.365444Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:59:08.365450Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:59:08.365465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:59:08.365471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;eve ... _portion=184; 2025-07-08T11:59:47.994849Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:48;memory_size=110;data_size=62;sum=787176;count=14327; 2025-07-08T11:59:47.994855Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:65;memory_size=206;data_size=174;sum=1474920;count=14328;size_of_meta=112; 2025-07-08T11:59:47.994859Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=246;sum=1990728;count=7164;size_of_portion=184; 2025-07-08T11:59:47.994925Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=7871; 2025-07-08T11:59:47.994934Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=2; 2025-07-08T11:59:47.995090Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=149; 2025-07-08T11:59:47.995098Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=8063; 2025-07-08T11:59:47.995103Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=8075; 2025-07-08T11:59:47.995110Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=1; 2025-07-08T11:59:47.995173Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=59; 2025-07-08T11:59:47.995178Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=8206; 2025-07-08T11:59:47.995225Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=39; 2025-07-08T11:59:47.995241Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=12; 2025-07-08T11:59:47.995297Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=50; 2025-07-08T11:59:47.995331Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=29; 2025-07-08T11:59:48.002117Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=6766; 2025-07-08T11:59:48.009745Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=7581; 2025-07-08T11:59:48.009788Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=4; 2025-07-08T11:59:48.009796Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=2; 2025-07-08T11:59:48.009802Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-07-08T11:59:48.009818Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=11; 2025-07-08T11:59:48.009825Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-07-08T11:59:48.009841Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=12; 2025-07-08T11:59:48.009848Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-07-08T11:59:48.009864Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=7; 2025-07-08T11:59:48.009883Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=13; 2025-07-08T11:59:48.009899Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=10; 2025-07-08T11:59:48.009905Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=23853; 2025-07-08T11:59:48.009948Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=22744072;raw_bytes=22320020;count=3;records=225200} inactive {blob_bytes=149450960;raw_bytes=145316940;count=221;records=1575200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-07-08T11:59:48.009995Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:9849:11448];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SwitchToWork; 2025-07-08T11:59:48.010005Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:9849:11448];process=SwitchToWork;fline=columnshard.cpp:76;event=initialize_shard;step=SignalTabletActive; 2025-07-08T11:59:48.010022Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9849:11448];process=SwitchToWork;fline=columnshard_impl.cpp:1327;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-07-08T11:59:48.010030Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9849:11448];process=SwitchToWork;fline=column_engine_logs.cpp:471;event=OnTieringModified;new_count_tierings=0; 2025-07-08T11:59:48.010086Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:48.010103Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=9; 2025-07-08T11:59:48.010118Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975653931;tx_id=18446744073709551615;;current_snapshot_ts=1751975950183; 2025-07-08T11:59:48.010125Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:48.010134Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:48.010139Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:48.010161Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; 2025-07-08T11:59:48.011335Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9849:11448];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:248;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-07-08T11:59:48.011440Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9849:11448];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:237;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-07-08T11:59:48.011445Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-07-08T11:59:48.011449Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-07-08T11:59:48.011454Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9849:11448];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:48.011472Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9849:11448];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=9; 2025-07-08T11:59:48.011480Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9849:11448];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975653931;tx_id=18446744073709551615;;current_snapshot_ts=1751975950183; 2025-07-08T11:59:48.011487Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9849:11448];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:48.011495Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9849:11448];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:48.011499Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9849:11448];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:48.011512Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9849:11448];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=1.000000s; 2025-07-08T11:59:48.011518Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9849:11448];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime [GOOD] Test command err: 2025-07-08T11:59:01.157469Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:59:01.160313Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:59:01.160370Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:59:01.160897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:59:01.160941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:59:01.160991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 
2025-07-08T11:59:01.161013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:59:01.161025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:59:01.161039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:59:01.161051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:59:01.161063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:59:01.161074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:59:01.161091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:59:01.161108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:59:01.161127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:59:01.166982Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:59:01.167176Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:59:01.167186Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:59:01.167236Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:01.167277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:59:01.167287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:59:01.167292Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:59:01.167299Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:59:01.167305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:59:01.167310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:59:01.167313Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:59:01.167330Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:01.167338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:59:01.167345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:59:01.167349Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:59:01.167359Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:59:01.167366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:59:01.167373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:59:01.167378Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:59:01.167386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:59:01.167393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:59:01.167397Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:59:01.167418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:59:01.167423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:59:01.167425Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:59:01.167440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:59:01.167445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:59:01.167448Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:59:01.167456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:59:01.167460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:59:01.167463Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:59:01.167468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:59:01.167472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:59:01.167477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:59:01.167479Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:59:01.167510Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=7; 2025-07-08T11:59:01.167521Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=8; 2025-07-08T11:59:01.167527Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2025-07-08T11:59:01.167535Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=6; 2025-07-08T11:59:01.167542Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:59:01.167551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:59:01.167556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:59:01.167560Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:59:01.167569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:59:01.167572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;eve ... 842;count=215;size_of_portion=184; 2025-07-08T11:59:48.473844Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:48;memory_size=110;data_size=70;sum=22832;count=431; 2025-07-08T11:59:48.473848Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:65;memory_size=206;data_size=182;sum=43568;count=432;size_of_meta=112; 2025-07-08T11:59:48.473853Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=254;sum=59120;count=216;size_of_portion=184; 2025-07-08T11:59:48.473870Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=1784; 2025-07-08T11:59:48.473878Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=1; 2025-07-08T11:59:48.473991Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=105; 2025-07-08T11:59:48.474002Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=1934; 2025-07-08T11:59:48.474006Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=1947; 2025-07-08T11:59:48.474013Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=1; 2025-07-08T11:59:48.474036Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=18; 2025-07-08T11:59:48.474040Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=2036; 2025-07-08T11:59:48.474062Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=15; 2025-07-08T11:59:48.474079Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=13; 2025-07-08T11:59:48.474101Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=17; 2025-07-08T11:59:48.474116Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=11; 2025-07-08T11:59:48.475245Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1121; 2025-07-08T11:59:48.476523Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1266; 2025-07-08T11:59:48.476532Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=1; 2025-07-08T11:59:48.476539Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2025-07-08T11:59:48.476545Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-07-08T11:59:48.476558Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=7; 2025-07-08T11:59:48.476564Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=0; 2025-07-08T11:59:48.476578Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=9; 2025-07-08T11:59:48.476584Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-07-08T11:59:48.476599Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=6; 2025-07-08T11:59:48.476610Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=7; 2025-07-08T11:59:48.476626Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=11; 2025-07-08T11:59:48.476631Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=5646; 2025-07-08T11:59:48.476667Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=108238344;raw_bytes=183045560;count=15;records=1915000} inactive {blob_bytes=205426288;raw_bytes=316809958;count=39;records=3635000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-07-08T11:59:48.476692Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4070:6045];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SwitchToWork; 2025-07-08T11:59:48.476701Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:4070:6045];process=SwitchToWork;fline=columnshard.cpp:76;event=initialize_shard;step=SignalTabletActive; 2025-07-08T11:59:48.476716Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];process=SwitchToWork;fline=columnshard_impl.cpp:1327;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-07-08T11:59:48.476725Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];process=SwitchToWork;fline=column_engine_logs.cpp:471;event=OnTieringModified;new_count_tierings=0; 2025-07-08T11:59:48.476755Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:48.476772Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=23; 2025-07-08T11:59:48.476787Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975645289;tx_id=18446744073709551615;;current_snapshot_ts=1751975942217; 2025-07-08T11:59:48.476796Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=23;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:48.476806Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:48.476811Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:48.476830Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; 2025-07-08T11:59:48.480141Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:248;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-07-08T11:59:48.480243Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:237;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-07-08T11:59:48.480249Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-07-08T11:59:48.480253Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-07-08T11:59:48.480259Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:48.480283Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=23; 2025-07-08T11:59:48.480295Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975645289;tx_id=18446744073709551615;;current_snapshot_ts=1751975942217; 2025-07-08T11:59:48.480304Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=23;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:48.480313Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:48.480318Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:48.480333Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.999000s; 2025-07-08T11:59:48.480341Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:4070:6045];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationDeletedPathNavigate [GOOD] Test command err: 2025-07-08T11:59:20.035795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:20.035821Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-07-08T11:59:20.085853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 2025-07-08T11:59:20.087940Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 65543, Sender [1:175:2171], Recipient [1:71:2110]: NActors::TEvents::TEvPoison 2025-07-08T11:59:20.088058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:71:2110] sender: [1:176:2067] recipient: [1:47:2094] Leader for TabletID 72057594046678944 
is [1:71:2110] sender: [1:179:2067] recipient: [1:24:2071] Leader for TabletID 72057594046678944 is [1:71:2110] sender: [1:180:2067] recipient: [1:178:2172] Leader for TabletID 72057594046678944 is [1:181:2173] sender: [1:182:2067] recipient: [1:178:2172] 2025-07-08T11:59:20.089505Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828672, Sender [1:178:2172], Recipient [1:181:2173]: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:59:20.091261Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828673, Sender [1:178:2172], Recipient [1:181:2173]: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:59:20.091633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:59:20.091648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:20.091653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:59:20.091657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:59:20.091662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:59:20.091665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:59:20.091673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:20.091684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:59:20.091755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:20.093487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:20.093778Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828684, Sender [1:178:2172], Recipient [1:181:2173]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:59:20.093829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:59:20.093860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:59:20.093880Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 65542, Sender [1:7238242728502259555:7369577], Recipient [1:181:2173]: TSystem::Undelivered 2025-07-08T11:59:20.093885Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, processing event TEvents::TEvUndelivered 2025-07-08T11:59:20.093890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:20.093895Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:20.093931Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Clear operation queue and active pipes 2025-07-08T11:59:20.093937Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:59:20.094025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:20.094040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, 
read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.094049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.094095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.094105Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-07-08T11:59:20.094133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.094142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.094150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.094168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.094177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.094199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.094238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.094250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.094287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.094295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.094314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.094323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.094332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.094364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.094373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.094388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.094418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.094430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.094436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.094458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.094464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.094470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 
72057594046678944 2025-07-08T11:59:20.094494Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-07-08T11:59:20.094712Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-07-08T11:59:20.094738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:59:20.094856Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435081, Sender [1:181:2173], Recipient [1:181:2173]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-07-08T11:59:20.094862Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-07-08T11:59:20.094931Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:20.094937Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:20.094981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:59:20.094988Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:20.094993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:59:20.094996Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-07-08T11:59:20.095008Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:194:2173], Recipient [1:181:2173]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-07-08T11:59:20.095012Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-07-08T11:59:20.095016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [1:181:2173] sender: [1:212:2067] recipient: [1:24:2071] 2025-07-08T11:59:20.115728Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:211:2190], Recipient [1:181:2173]: {TEvModifySchemeTransaction txid# 101 TabletId# 72057594046678944} 2025-07-08T11:59:20.115746Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-07-08T11:59:20.133189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateSubDomain SubDomain { Name: "USER_0" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:20.133269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/USER_0, opId: 101:0, at schemeshard: 72057594046678944 2025-07-08T11:59:20.133292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: Root, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-07-08T11:59:20.133346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-07-08T11:59:20.133389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-07-08T11:59:20.133425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusAccepted, reason ... 72057594046678944, status: OK, at schemeshard: 72075186233409549 2025-07-08T11:59:20.360913Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125003, Sender [1:425:2338], Recipient [1:494:2385]: NKikimrScheme.TEvSyncTenantSchemeShard DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186233409549 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 1 TenantHive: 18446744073709551615 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-07-08T11:59:20.360919Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvSyncTenantSchemeShard 2025-07-08T11:59:20.360933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186233409549 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 1 TenantHive: 18446744073709551615 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-07-08T11:59:20.360977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-07-08T11:59:20.360983Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-07-08T11:59:20.361007Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[1:425:2338], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-07-08T11:59:20.361038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-07-08T11:59:20.361042Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-07-08T11:59:20.442349Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:20.442378Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 
2025-07-08T11:59:20.458628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 Leader for TabletID 72057594046678944 is [2:71:2110] sender: [2:176:2067] recipient: [2:47:2094] Leader for TabletID 72057594046678944 is [2:71:2110] sender: [2:179:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:71:2110] sender: [2:180:2067] recipient: [2:178:2172] Leader for TabletID 72057594046678944 is [2:181:2173] sender: [2:182:2067] recipient: [2:178:2172] 2025-07-08T11:59:20.464449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:20.464471Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [2:181:2173] sender: [2:212:2067] recipient: [2:24:2071] 2025-07-08T11:59:20.486509Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-07-08T11:59:20.491017Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 102:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:248:2067] recipient: [2:239:2214] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:248:2067] recipient: [2:239:2214] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:249:2067] recipient: [2:244:2218] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:249:2067] recipient: [2:244:2218] Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:252:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:252:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:253:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:253:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409546 is [2:251:2220] sender: [2:254:2067] recipient: [2:239:2214] Leader for TabletID 72075186233409547 is [2:256:2222] sender: [2:257:2067] recipient: [2:244:2218] TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 2025-07-08T11:59:20.494586Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 Leader for TabletID 72075186233409546 is [2:251:2220] sender: [2:290:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [2:256:2222] sender: [2:291:2067] recipient: [2:24:2071] FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: 
advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 2025-07-08T11:59:20.528373Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 TestWaitNotification wait txId: 104 Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:341:2067] recipient: [2:337:2286] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:341:2067] recipient: [2:337:2286] Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:342:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:342:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409548 is [2:344:2290] sender: [2:345:2067] recipient: [2:337:2286] Leader for TabletID 72075186233409548 is [2:344:2290] sender: [2:346:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-07-08T11:59:20.592379Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomain, opId: 105:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:421:2067] recipient: [2:417:2335] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:421:2067] recipient: [2:417:2335] Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:423:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:423:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409549 is [2:424:2338] sender: [2:425:2067] recipient: [2:417:2335] 2025-07-08T11:59:20.600129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:20.600151Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 Leader for TabletID 72075186233409549 is [2:424:2338] sender: [2:452:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 105 TestModificationResults wait txId: 106 2025-07-08T11:59:20.635002Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T11:59:20.635021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 4] 2025-07-08T11:59:20.635103Z node 2 :FLAT_TX_SCHEMESHARD ERROR: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2025-07-08T11:59:20.635125Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-07-08T11:59:20.646661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2025-07-08T11:59:20.646723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at 
schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 TestModificationResults wait txId: 107 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-07-08T11:59:20.671845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 108:0, at schemeshard: 72075186233409549 Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:557:2067] recipient: [2:553:2442] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:557:2067] recipient: [2:553:2442] Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:558:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:558:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409550 is [2:560:2446] sender: [2:561:2067] recipient: [2:553:2442] Leader for TabletID 72075186233409550 is [2:560:2446] sender: [2:562:2067] recipient: [2:24:2071] TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 Forgetting tablet 72075186233409548 TestWaitNotification: OK eventTxId 108 2025-07-08T11:59:23.181241Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-07-08T11:59:23.181272Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:23.248634Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-07-08T11:59:23.248666Z node 2 :IMPORT WARN: Table profiles were not loaded >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit+UseSink [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit-UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTestWithDrops::LookupErrorUponEviction [GOOD] Test command err: 2025-07-08T11:59:18.202505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:18.202529Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-07-08T11:59:18.259840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-07-08T11:59:18.262964Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-07-08T11:59:18.263200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 
State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-07-08T11:59:18.268097Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-07-08T11:59:18.611193Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:18.611217Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-07-08T11:59:18.627447Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 |65.9%| [TA] $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... results_accumulator.log} |65.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest |65.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest |65.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |65.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest |65.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |65.9%| [TA] {RESULT} $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... results_accumulator.log} |66.0%| [TA] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... 
results_accumulator.log} |66.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |66.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/executer_actor/ut/unittest |66.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> Initializer::Simple |66.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot [GOOD] |66.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |66.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |66.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |66.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |66.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |66.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |66.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |66.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |66.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |66.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |66.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |66.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |66.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |66.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |66.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |66.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |66.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |66.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |66.1%| [TA] $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... results_accumulator.log} |66.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |66.1%| [TA] $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... results_accumulator.log} |66.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |66.1%| [TA] $(B)/ydb/core/kqp/executer_actor/ut/test-results/unittest/{meta.json ... results_accumulator.log} |66.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |66.1%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... 
results_accumulator.log} |66.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |66.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |66.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |66.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |66.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |66.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |66.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |66.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |66.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |66.1%| [LD] {RESULT} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |66.1%| [TA] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/test-results/unittest/{meta.json ... results_accumulator.log} |66.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |66.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |66.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |66.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |66.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |66.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |66.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |66.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit-UseSink [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort |66.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot [GOOD] Test command err: 2025-07-08T11:59:08.023656Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:59:08.027595Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:59:08.027636Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:59:08.028306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:59:08.028355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:59:08.028382Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:59:08.028402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:59:08.028419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:59:08.028437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:59:08.028453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:59:08.028470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:59:08.028486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:59:08.028502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:59:08.028519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:59:08.028536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:59:08.034393Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:59:08.034582Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:59:08.034592Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:59:08.034619Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:08.034656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:59:08.034670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:59:08.034675Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:59:08.034685Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:59:08.034693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:59:08.034700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:59:08.034705Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:59:08.034726Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:08.034734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:59:08.034740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:59:08.034745Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:59:08.034754Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:59:08.034761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:59:08.034768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:59:08.034773Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:59:08.034781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:59:08.034788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:59:08.034792Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:59:08.034816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:59:08.034822Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:59:08.034827Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:59:08.034845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:59:08.034853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:59:08.034857Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:59:08.034870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:59:08.034876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:59:08.034881Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:59:08.034889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:59:08.034896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:59:08.034903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:59:08.034907Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:59:08.034941Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=9; 2025-07-08T11:59:08.034956Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=10; 2025-07-08T11:59:08.034964Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2025-07-08T11:59:08.034974Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=7; 2025-07-08T11:59:08.034984Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:59:08.034994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:59:08.035002Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:59:08.035007Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:59:08.035019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:59:08.035025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;ev ... portion=184; 2025-07-08T11:59:51.169439Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:48;memory_size=110;data_size=62;sum=787176;count=14327; 2025-07-08T11:59:51.169445Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:65;memory_size=206;data_size=174;sum=1474920;count=14328;size_of_meta=112; 2025-07-08T11:59:51.169451Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=246;sum=1990728;count=7164;size_of_portion=184; 2025-07-08T11:59:51.169525Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=8696; 2025-07-08T11:59:51.169538Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=2; 2025-07-08T11:59:51.169711Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=166; 2025-07-08T11:59:51.169720Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=8912; 2025-07-08T11:59:51.169725Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=8925; 2025-07-08T11:59:51.169733Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=1; 2025-07-08T11:59:51.169798Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=60; 2025-07-08T11:59:51.169804Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=9060; 2025-07-08T11:59:51.169840Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=28; 2025-07-08T11:59:51.169859Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=13; 2025-07-08T11:59:51.169928Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=63; 2025-07-08T11:59:51.169974Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=41; 2025-07-08T11:59:51.178067Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=8072; 2025-07-08T11:59:51.194796Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=16674; 2025-07-08T11:59:51.194841Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=5; 2025-07-08T11:59:51.194850Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=2; 2025-07-08T11:59:51.194856Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-07-08T11:59:51.194874Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=13; 2025-07-08T11:59:51.194882Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-07-08T11:59:51.194899Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=12; 2025-07-08T11:59:51.194905Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-07-08T11:59:51.194919Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=7; 2025-07-08T11:59:51.194939Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=14; 2025-07-08T11:59:51.194957Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=13; 2025-07-08T11:59:51.194963Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=35274; 2025-07-08T11:59:51.195008Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=22744072;raw_bytes=22320020;count=3;records=225200} inactive {blob_bytes=149450960;raw_bytes=145316940;count=221;records=1575200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-07-08T11:59:51.195048Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:9850:11449];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SwitchToWork; 2025-07-08T11:59:51.195058Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:9850:11449];process=SwitchToWork;fline=columnshard.cpp:76;event=initialize_shard;step=SignalTabletActive; 2025-07-08T11:59:51.195084Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9850:11449];process=SwitchToWork;fline=columnshard_impl.cpp:1327;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-07-08T11:59:51.195094Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9850:11449];process=SwitchToWork;fline=column_engine_logs.cpp:471;event=OnTieringModified;new_count_tierings=0; 2025-07-08T11:59:51.195153Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:51.195171Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=9; 2025-07-08T11:59:51.195185Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975653603;tx_id=18446744073709551615;;current_snapshot_ts=1751975949855; 2025-07-08T11:59:51.195194Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:51.195205Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:51.195210Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:51.195234Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; 2025-07-08T11:59:51.197216Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9850:11449];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:248;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-07-08T11:59:51.197379Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9850:11449];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:237;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-07-08T11:59:51.197386Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-07-08T11:59:51.197390Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-07-08T11:59:51.197397Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9850:11449];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:51.197423Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9850:11449];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=9; 2025-07-08T11:59:51.197434Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9850:11449];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975653603;tx_id=18446744073709551615;;current_snapshot_ts=1751975949855; 2025-07-08T11:59:51.197442Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9850:11449];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=9;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:51.197453Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9850:11449];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:51.197458Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9850:11449];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:51.197480Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9850:11449];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=1.000000s; 2025-07-08T11:59:51.197488Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:9850:11449];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; |66.2%| [TA] {RESULT} $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |66.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |66.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |66.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |66.2%| [LD] {RESULT} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |66.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |66.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |66.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> TOlapReboots::DropMultipleStandaloneTables |66.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |66.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |66.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |66.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest |66.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest |66.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |66.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest |66.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TColumnShardTestReadWrite::CompactionGC [GOOD] >> TOlapReboots::DropTableThenStore |66.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base >> TOlapReboots::CreateMultipleStandaloneTables >> TOlapReboots::CreateTable >> TOlapReboots::CreateDropTable |66.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |66.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base >> LocalPartition::DiscoveryHang [GOOD] >> LocalPartition::WithoutPartition >> TBsProxyFaultToleranceTest::CheckTGetWithRecoverFaultToleranceTestErasureMirror3of4 [GOOD] >> TxUsage::WriteToTopic_Demo_18_RestartNo [GOOD] |66.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TUserAttrsTestWithReboots::InSubdomain >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberSeconds >> TSchemeShardTopicSplitMergeTest::Boot |66.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |66.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition >> EraseRowsTests::EraseRowsShouldSuccess |66.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TSchemeShardTopicSplitMergeTest::MargeUnorderedPartitions ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionGC [GOOD] Test command err: 2025-07-08T11:59:09.920761Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:59:09.923458Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:59:09.923501Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at 
tablet 9437184 2025-07-08T11:59:09.923997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:59:09.924037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:59:09.924067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:59:09.924085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:59:09.924096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:59:09.924107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:59:09.924118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:59:09.924132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:59:09.924142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:59:09.924153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:59:09.924164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:59:09.924178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:59:09.928930Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:59:09.929031Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:59:09.929041Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:59:09.929086Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:09.929121Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:59:09.929132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:59:09.929135Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:59:09.929141Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:59:09.929147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:59:09.929153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:59:09.929156Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:59:09.929166Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:09.929172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:59:09.929176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:59:09.929179Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:59:09.929185Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:59:09.929189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:59:09.929193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:59:09.929196Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:59:09.929201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:59:09.929206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:59:09.929209Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:59:09.929224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:59:09.929238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:59:09.929243Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:59:09.929265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:59:09.929272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:59:09.929276Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:59:09.929289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:59:09.929296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:59:09.929300Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:59:09.929308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:59:09.929316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:59:09.929323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:59:09.929327Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:59:09.929356Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=7; 2025-07-08T11:59:09.929362Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=4; 2025-07-08T11:59:09.929368Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=3; 2025-07-08T11:59:09.929376Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=6; 2025-07-08T11:59:09.929382Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:59:09.929390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:59:09.929395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:59:09.929398Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:59:09.929406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:59:09.929410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;eve ... 57];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Compiled;task_id=360b0c8-5bf311f0-a80ac388-d9871e58; 2025-07-08T11:59:34.034137Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[254] (CS::CLEANUP::PORTIONS) apply at tablet 9437184 2025-07-08T11:59:34.036854Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:125:2157];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=360b0c8-5bf311f0-a80ac388-d9871e58;fline=abstract.cpp:13;event=new_stage;stage=Written;task_id=360b0c8-5bf311f0-a80ac388-d9871e58; 2025-07-08T11:59:34.037188Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=129593352;raw_bytes=131330514;count=21;records=1575000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=22114848;raw_bytes=22108350;count=3;records=225000} inactive {blob_bytes=370457464;raw_bytes=372291258;count=55;records=4050002} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-07-08T11:59:34.065248Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=360b0c8-5bf311f0-a80ac388-d9871e58;fline=abstract.cpp:13;event=new_stage;stage=Finished;task_id=360b0c8-5bf311f0-a80ac388-d9871e58; 2025-07-08T11:59:34.065271Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=360b0c8-5bf311f0-a80ac388-d9871e58;fline=abstract.cpp:54;event=WriteIndexComplete;type=CS::CLEANUP::PORTIONS;success=1; 2025-07-08T11:59:34.065428Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=360b0c8-5bf311f0-a80ac388-d9871e58;fline=manager.cpp:15;event=unlock;process_id=CS::CLEANUP::PORTIONS::PORTIONS_DROP::360b0c8-5bf311f0-a80ac388-d9871e58; 2025-07-08T11:59:34.065444Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=360b0c8-5bf311f0-a80ac388-d9871e58;tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T11:59:34.065461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;task_id=360b0c8-5bf311f0-a80ac388-d9871e58;tablet_id=9437184;fline=columnshard_impl.cpp:481;event=skip_compaction;reason=disabled; 2025-07-08T11:59:34.065470Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=360b0c8-5bf311f0-a80ac388-d9871e58;tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=0; 2025-07-08T11:59:34.065483Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;task_id=360b0c8-5bf311f0-a80ac388-d9871e58;tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T11:59:34.065494Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=360b0c8-5bf311f0-a80ac388-d9871e58;tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:34.065499Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=360b0c8-5bf311f0-a80ac388-d9871e58;tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T11:59:34.065518Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=360b0c8-5bf311f0-a80ac388-d9871e58;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.502500s; 2025-07-08T11:59:34.065527Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=360b0c8-5bf311f0-a80ac388-d9871e58;tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; 2025-07-08T11:59:34.065568Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:18:2:0:6043488:0] 2025-07-08T11:59:34.065576Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:45:2:0:6171112:0] 2025-07-08T11:59:34.065580Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:46:3:0:6043488:0] 2025-07-08T11:59:34.065587Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:38:4:0:6043488:0] 2025-07-08T11:59:34.065592Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:24:2:0:6171112:0] 2025-07-08T11:59:34.065597Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:21:2:0:6171112:0] 2025-07-08T11:59:34.065601Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:16:3:0:6171112:0] 2025-07-08T11:59:34.065606Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:8:4:0:6171112:0] 2025-07-08T11:59:34.065611Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:43:3:0:6171112:0] 2025-07-08T11:59:34.065615Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:13:3:0:6043488:0] 2025-07-08T11:59:34.065620Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:55:3:0:6171112:0] 2025-07-08T11:59:34.065624Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:36:2:0:6171112:0] 2025-07-08T11:59:34.065629Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:22:3:0:6043488:0] 2025-07-08T11:59:34.065633Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:27:2:0:6043488:0] 2025-07-08T11:59:34.065638Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:23:4:0:6043488:0] 2025-07-08T11:59:34.065642Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:51:2:0:6043488:0] 2025-07-08T11:59:34.065648Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:32:4:0:6043488:0] 2025-07-08T11:59:34.065653Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:30:2:0:6043488:0] 2025-07-08T11:59:34.065658Z 
node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:28:3:0:6043488:0] 2025-07-08T11:59:34.065663Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:34:3:0:1792:0] 2025-07-08T11:59:34.065667Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:2:4:0:6171112:0] 2025-07-08T11:59:34.065672Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:44:4:0:6043488:0] 2025-07-08T11:59:34.065676Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:49:3:0:6043488:0] 2025-07-08T11:59:34.065681Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:48:2:0:6171112:0] 2025-07-08T11:59:34.065686Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:52:3:0:6043488:0] 2025-07-08T11:59:34.065690Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:10:3:0:6043488:0] 2025-07-08T11:59:34.065694Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:40:3:0:6171112:0] 2025-07-08T11:59:34.065699Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:41:4:0:6043488:0] 2025-07-08T11:59:34.065703Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:12:2:0:6043488:0] 2025-07-08T11:59:34.065708Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:1:3:0:6171112:0] 2025-07-08T11:59:34.065714Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:7:3:0:6043488:0] 2025-07-08T11:59:34.065718Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:9:2:0:6043488:0] 2025-07-08T11:59:34.065723Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:35:4:0:6043488:0] 2025-07-08T11:59:34.065727Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:42:2:0:6043488:0] 2025-07-08T11:59:34.065732Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:19:3:0:6171112:0] 2025-07-08T11:59:34.065739Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:54:2:0:6043488:0] 2025-07-08T11:59:34.065744Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:15:2:0:6043488:0] 2025-07-08T11:59:34.065748Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:57:2:0:6043488:0] 2025-07-08T11:59:34.065752Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:20:4:0:6043488:0] 2025-07-08T11:59:34.065757Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:29:4:0:6171112:0] 2025-07-08T11:59:34.065761Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:26:4:0:6171112:0] 2025-07-08T11:59:34.065766Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:11:4:0:6171112:0] 2025-07-08T11:59:34.065770Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:31:3:0:6171112:0] 2025-07-08T11:59:34.065775Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:14:4:0:6171112:0] 2025-07-08T11:59:34.065779Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob 
DS:0:[9437184:2:53:4:0:6171112:0] 2025-07-08T11:59:34.065784Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:50:4:0:6171112:0] 2025-07-08T11:59:34.065789Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:33:2:0:6043488:0] 2025-07-08T11:59:34.065794Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:56:4:0:6043488:0] 2025-07-08T11:59:34.065798Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:39:2:0:6043488:0] 2025-07-08T11:59:34.065802Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:6:2:0:6171112:0] 2025-07-08T11:59:34.065807Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:47:4:0:6043488:0] 2025-07-08T11:59:34.065814Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:37:3:0:6171112:0] 2025-07-08T11:59:34.065818Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:25:3:0:6043488:0] 2025-07-08T11:59:34.065823Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:17:4:0:6043488:0] 2025-07-08T11:59:34.065829Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:4:3:0:6171112:0] GC for channel 3 deletes blobs: WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 WAIT_CLEANING: 1 Compactions happened: 34 Cleanups happened: 1 Old portions: 1 2 4 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 58 Cleaned up portions: 1 2 4 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 58 |66.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |66.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TxUsage::WriteToTopic_Demo_18_RestartBeforeCommit |66.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTGetWithRecoverFaultToleranceTestErasureMirror3of4 [GOOD] >> TSchemeShardTopicSplitMergeTest::Boot [GOOD] >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64Seconds >> DistributedEraseTests::ConditionalEraseRowsShouldSuccessOnShardedIndex >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort [GOOD] >> DataShardVolatile::DistributedWriteAsymmetricExecute >> 
EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Seconds >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint32 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt4Seconds >> TSchemeShardTopicSplitMergeTest::MargeUnorderedPartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::MargePartitions2 >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary [GOOD] >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx4 [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMilliSeconds >> TColumnShardTestReadWrite::WriteReadDuplicate [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:59:55.899465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:59:55.899492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:55.899497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:59:55.899502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:59:55.899508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:59:55.899512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:59:55.899525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:55.899538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:59:55.899614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:55.912034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:55.912058Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:55.915934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:55.916011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:59:55.916046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:59:55.917493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:59:55.917560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:59:55.917660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured 
yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:55.917806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:59:55.918570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:55.918610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:59:55.918852Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:55.918861Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:55.918879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:59:55.918886Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:55.918892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:59:55.918920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.920199Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:59:55.939665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:55.939745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.939814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:59:55.939857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:59:55.939869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.941461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:55.941497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:59:55.941557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.941568Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:59:55.941574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:59:55.941580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:59:55.942106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.942122Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:55.942128Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:59:55.942486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.942496Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.942503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:55.942510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:59:55.943183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:59:55.943571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:59:55.943616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:59:55.943787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:55.943810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:55.943834Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:55.943925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:59:55.943933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:55.943964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:59:55.943993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:59:55.944382Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:55.944391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:55.944440Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-07-08T11:59:55.944445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:59:55.944455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.944462Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:59:55.944473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:55.944478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:55.944482Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:55.944486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:55.944490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:59:55.944496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:55.944501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:59:55.944505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:59:55.944516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:59:55.944522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:59:55.944527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:59:55.944912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:59:55.944925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
57594046678944 message# TabletId: 72075186233409548 TxId: 104 Status: OK 2025-07-08T11:59:56.046729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-07-08T11:59:56.046734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-07-08T11:59:56.047254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-07-08T11:59:56.047319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-07-08T11:59:56.047326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-07-08T11:59:56.047389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 104, at schemeshard: 72057594046678944 2025-07-08T11:59:56.047395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-07-08T11:59:56.047401Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 104, at schemeshard: 72057594046678944 2025-07-08T11:59:56.088170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 150, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:56.088214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 AckTo { RawX1: 0 RawX2: 0 } } Step: 150 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:56.088227Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvOperationPlan, step: 150, at tablet: 72057594046678944 2025-07-08T11:59:56.088247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-07-08T11:59:56.097271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409548, partId: 0 2025-07-08T11:59:56.097319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2025-07-08T11:59:56.097330Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2025-07-08T11:59:56.097341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.097346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-07-08T11:59:56.097395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-07-08T11:59:56.097444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-07-08T11:59:56.097456Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-07-08T11:59:56.102010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.102162Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:56.102170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-07-08T11:59:56.102214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T11:59:56.102251Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:56.102256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-07-08T11:59:56.102261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-07-08T11:59:56.102351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.102360Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-07-08T11:59:56.102374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-07-08T11:59:56.102377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-07-08T11:59:56.102382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-07-08T11:59:56.102388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-07-08T11:59:56.102392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-07-08T11:59:56.102398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-07-08T11:59:56.102403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-07-08T11:59:56.102407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-07-08T11:59:56.102438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-07-08T11:59:56.102443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 1 2025-07-08T11:59:56.102447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-07-08T11:59:56.102450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-07-08T11:59:56.102685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-07-08T11:59:56.102697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 
2025-07-08T11:59:56.102701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-07-08T11:59:56.102706Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-07-08T11:59:56.102710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-07-08T11:59:56.102934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-07-08T11:59:56.102943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-07-08T11:59:56.102946Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-07-08T11:59:56.102950Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-07-08T11:59:56.102953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-07-08T11:59:56.102962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2025-07-08T11:59:56.102967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:415:2381] 2025-07-08T11:59:56.104273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-07-08T11:59:56.104302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-07-08T11:59:56.104317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-07-08T11:59:56.104323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:552:2487] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 7 SplitBoundary: "W" } TestModificationResults wait txId: 105 2025-07-08T11:59:56.107517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 7 SplitBoundary: "W" } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:56.107560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.107603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Splitting partition does not exists: 7, at schemeshard: 72057594046678944 2025-07-08T11:59:56.107998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Splitting partition does not exists: 7" 
TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:56.108024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Splitting partition does not exists: 7, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-07-08T11:59:56.108070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-07-08T11:59:56.108076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-07-08T11:59:56.108134Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-07-08T11:59:56.108148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-07-08T11:59:56.108152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:648:2572] TestWaitNotification: OK eventTxId 105 >> EraseRowsTests::EraseRowsShouldSuccess [GOOD] >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors >> TSchemeShardTopicSplitMergeTest::MargePartitions2 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx4 [GOOD] Test command err: iteration# 4 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 10 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 16 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 22 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 28 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 34 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 40 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 46 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 52 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 58 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 64 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 70 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 76 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 82 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 88 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 94 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 100 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 106 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 112 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 118 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 124 BlobsWritten# 490 
blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 130 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 136 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 142 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 148 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 154 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 160 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 166 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 172 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 178 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 184 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 190 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 196 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 202 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 208 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 214 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 220 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 226 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 232 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 238 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 244 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 250 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 256 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 262 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 268 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 274 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 280 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 286 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 292 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 298 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 304 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 310 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 316 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 322 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 328 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 334 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 
blobsUnwritten# 0 iteration# 340 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 346 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 352 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 358 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 364 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 370 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 376 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 382 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 388 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 394 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 400 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 406 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 412 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 418 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 424 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 430 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 436 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 442 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 448 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 454 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 460 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 466 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 472 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 478 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 484 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:59:55.988312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:59:55.988335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:55.988341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching 
config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:59:55.988345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:59:55.988350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:59:55.988354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:59:55.988368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:55.988380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:59:55.988444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:56.016775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:56.016791Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:56.026136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:56.026193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:59:56.026219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:59:56.027719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:59:56.027772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:59:56.027861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:56.028042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:59:56.029288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:56.029323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:59:56.029535Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:56.029547Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:56.029564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:59:56.029571Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:56.029577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:59:56.029600Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.030742Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:59:56.046978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: 
"pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:56.047034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.047081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:59:56.047119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:59:56.047128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.047653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:56.047677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:59:56.047707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.047716Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:59:56.047720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:59:56.047728Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:59:56.048092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.048103Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:56.048107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:59:56.048406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.048416Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.048422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:56.048427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:59:56.048986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:59:56.049375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:59:56.049411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 
72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:59:56.049577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:56.049600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:56.049617Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:56.049686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:59:56.049693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:56.049719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:59:56.049730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:59:56.050117Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:56.050125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:56.050162Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:56.050167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:59:56.050178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.050186Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:59:56.050196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:56.050200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:56.050205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:56.050208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:56.050212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:59:56.050217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:56.050221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:59:56.050225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:59:56.050236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:59:56.050241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:59:56.050245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 
72057594046678944, LocalPathId: 1], 3 2025-07-08T11:59:56.050606Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:59:56.050621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... ionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:59:56.472883Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:56.472888Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:59:56.472893Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:59:56.472897Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:59:56.472901Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:59:56.472909Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:56.472919Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:59:56.481079Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:56.482416Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:56.482765Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:59:56.482799Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:59:56.482827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:56.482831Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:56.482874Z node 2 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:59:56.482958Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2025-07-08T11:59:56.482974Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-07-08T11:59:56.482980Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_1, child name: Topic1, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-07-08T11:59:56.482988Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.482996Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 
0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.483026Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-07-08T11:59:56.483078Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.483087Z node 2 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-07-08T11:59:56.483109Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.483125Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.483138Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2025-07-08T11:59:56.483144Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-07-08T11:59:56.483161Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-07-08T11:59:56.483164Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-07-08T11:59:56.483167Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-07-08T11:59:56.483180Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.483188Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.483218Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2025-07-08T11:59:56.483251Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T11:59:56.483290Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.483301Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.483362Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.483370Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.483399Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.483408Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.483417Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.483437Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.483445Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.483473Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 
72057594046678944 2025-07-08T11:59:56.483503Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.483510Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.483516Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.483529Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.483534Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.483540Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.489621Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:59:56.490765Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:56.490783Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:56.490831Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:59:56.490840Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:56.490856Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:59:56.491722Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [2:641:2563] sender: [2:701:2058] recipient: [2:15:2062] 2025-07-08T11:59:56.545065Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-07-08T11:59:56.545127Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 86us result status StatusSuccess 2025-07-08T11:59:56.545262Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 1024 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { 
FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 1 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 1024 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:59:56.447961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:59:56.447994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:56.448000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:59:56.448004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:59:56.448009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:59:56.448013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:59:56.448033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2025-07-08T11:59:56.448044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:59:56.448106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:56.460831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:56.460847Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:56.464157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:56.464195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:59:56.464231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:59:56.465513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:59:56.465569Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:59:56.465656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:56.465818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:59:56.466705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:56.466738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:59:56.466941Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:56.466951Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:56.466965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:59:56.466971Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:56.466977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:59:56.466999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.467933Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:59:56.493191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:56.493244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.493293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:59:56.493329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:59:56.493339Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.501236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:56.501268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:59:56.501309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.501319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:59:56.501323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:59:56.501328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:59:56.501766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.501775Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:56.501780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:59:56.502051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.502059Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.502064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:56.502070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:59:56.502647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:59:56.502977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:59:56.503009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:59:56.503164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:56.503182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:56.503198Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-07-08T11:59:56.503267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:59:56.503274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:56.503300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:59:56.503310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:59:56.503648Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:56.503654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:56.503689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:56.503694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:59:56.503703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.503709Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:59:56.503719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:56.503722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:56.503727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:56.503730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:56.503736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:59:56.503741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:56.503745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:59:56.503749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:59:56.503758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:59:56.503763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:59:56.503767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:59:56.504142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:59:56.504155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
chemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:56.639399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary is empty, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-07-08T11:59:56.639448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-07-08T11:59:56.639454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-07-08T11:59:56.639513Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-07-08T11:59:56.639525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-07-08T11:59:56.639529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:648:2572] TestWaitNotification: OK eventTxId 105 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\001" } TestModificationResults wait txId: 106 2025-07-08T11:59:56.640097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\001" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:56.640126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.640163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '01' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', at schemeshard: 72057594046678944 2025-07-08T11:59:56.640530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Split boundary less or equals FromBound of partition: \'01\' <= \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\'" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:56.640550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '01' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-07-08T11:59:56.640588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-07-08T11:59:56.640593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-07-08T11:59:56.640637Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-07-08T11:59:56.640650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-07-08T11:59:56.640654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 106: satisfy waiter [1:655:2579] TestWaitNotification: OK eventTxId 106 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "UUUUUUUUUUUUUUUT" } TestModificationResults wait txId: 107 2025-07-08T11:59:56.641149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "UUUUUUUUUUUUUUUT" } } } TxId: 107 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:56.641170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 107:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.641195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 107:1, propose status:StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', at schemeshard: 72057594046678944 2025-07-08T11:59:56.641543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 107, response: Status: StatusInvalidParameter Reason: "Split boundary less or equals FromBound of partition: \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\' <= \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\'" TxId: 107 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:56.641562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 107, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 2025-07-08T11:59:56.641600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2025-07-08T11:59:56.641604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2025-07-08T11:59:56.641649Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2025-07-08T11:59:56.641661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-07-08T11:59:56.641665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:662:2586] TestWaitNotification: OK eventTxId 107 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\255" } TestModificationResults wait txId: 108 2025-07-08T11:59:56.642082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\255" } } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:56.642102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 108:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.642126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 
108:1, propose status:StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AD' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), at schemeshard: 72057594046678944 2025-07-08T11:59:56.647948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusInvalidParameter Reason: "Split boundary greate or equals ToBound of partition: \'AD\' >= \'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\' (FromBound is \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\')" TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:56.647993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AD' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2025-07-08T11:59:56.648058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2025-07-08T11:59:56.648064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2025-07-08T11:59:56.648129Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-07-08T11:59:56.648146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-07-08T11:59:56.648150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:669:2593] TestWaitNotification: OK eventTxId 108 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } TestModificationResults wait txId: 109 2025-07-08T11:59:56.648707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:56.648739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 109:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.648794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 109:1, propose status:StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), at schemeshard: 72057594046678944 2025-07-08T11:59:56.649339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 109, response: Status: StatusInvalidParameter Reason: "Split boundary greate or equals ToBound of partition: \'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\' >= \'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\' (FromBound is \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\')" TxId: 109 SchemeshardId: 
72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:56.649367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 109, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 2025-07-08T11:59:56.649415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: send EvNotifyTxCompletion 2025-07-08T11:59:56.649420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 109 2025-07-08T11:59:56.649474Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2025-07-08T11:59:56.649491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2025-07-08T11:59:56.649496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [1:676:2600] TestWaitNotification: OK eventTxId 109 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64Seconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Seconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Milliseconds |66.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |66.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::MargePartitions2 [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:59:56.122383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:59:56.122403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:56.122407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:59:56.122412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:59:56.122417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:59:56.122420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:59:56.122432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:56.122444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
[RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:59:56.122505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:56.140412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:56.140431Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:56.144351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:56.144405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:59:56.144431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:59:56.145913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:59:56.145971Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:59:56.146062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:56.146233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:59:56.147092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:56.147131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:59:56.147351Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:56.147363Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:56.147381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:59:56.147388Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:56.147395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:59:56.147419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.148986Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:59:56.170907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:56.170991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.171062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:59:56.171110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:59:56.171121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.173348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:56.173386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:59:56.173447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.173460Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:59:56.173466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:59:56.173472Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:59:56.177276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.177297Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:56.177304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:59:56.179728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.179746Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.179755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:56.179764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:59:56.180460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:59:56.180965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:59:56.181011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:59:56.181214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:56.181240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:56.181261Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:56.181350Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:59:56.181359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:56.181393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:59:56.181407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:59:56.182034Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:56.182044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:56.182097Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:56.182147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:59:56.182158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.182166Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:59:56.182179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:56.182184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:56.182190Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:56.182193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:56.182200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:59:56.182206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:56.182210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:59:56.182215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:59:56.182228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:59:56.182234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:59:56.182239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:59:56.182688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:59:56.182702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
riber for txId 105: send EvNotifyTxCompletion 2025-07-08T11:59:57.059166Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-07-08T11:59:57.059235Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 105, at schemeshard: 72057594046678944 2025-07-08T11:59:57.059240Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2025-07-08T11:59:57.059244Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 105, at schemeshard: 72057594046678944 2025-07-08T11:59:57.097833Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 200, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:57.097877Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 AckTo { RawX1: 0 RawX2: 0 } } Step: 200 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:57.097890Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 105:0 HandleReply TEvOperationPlan, step: 200, at tablet: 72057594046678944 2025-07-08T11:59:57.097898Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-07-08T11:59:57.110225Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2025-07-08T11:59:57.110283Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2025-07-08T11:59:57.110293Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2025-07-08T11:59:57.110306Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-07-08T11:59:57.110311Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-07-08T11:59:57.110359Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 240 2025-07-08T11:59:57.110394Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-07-08T11:59:57.110998Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-07-08T11:59:57.111134Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:57.111142Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T11:59:57.111198Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:57.111204Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2206], at schemeshard: 72057594046678944, txId: 105, path id: 3 
2025-07-08T11:59:57.111215Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-07-08T11:59:57.111222Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-07-08T11:59:57.111233Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-07-08T11:59:57.111237Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-07-08T11:59:57.111243Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-07-08T11:59:57.111247Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-07-08T11:59:57.111251Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-07-08T11:59:57.111257Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-07-08T11:59:57.111262Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-07-08T11:59:57.111270Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-07-08T11:59:57.111295Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-07-08T11:59:57.111300Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2025-07-08T11:59:57.111304Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-07-08T11:59:57.111468Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-07-08T11:59:57.111480Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-07-08T11:59:57.111485Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-07-08T11:59:57.111489Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-07-08T11:59:57.111494Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-07-08T11:59:57.111506Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2025-07-08T11:59:57.111511Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:405:2371] 2025-07-08T11:59:57.112463Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-07-08T11:59:57.112485Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-07-08T11:59:57.112491Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:670:2591] TestWaitNotification: OK eventTxId 105 2025-07-08T11:59:57.113903Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-07-08T11:59:57.113951Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 58us result status StatusSuccess 2025-07-08T11:59:57.114140Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" } Status: Inactive ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Inactive ChildPartitionIds: 4 } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Active } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Active ParentPartitionIds: 1 ParentPartitionIds: 2 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 
72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 ParentPartitionIds: 2 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |66.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint32 [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMilliSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors [GOOD] >> DataShardVolatile::DistributedWriteAsymmetricExecute [GOOD] >> DataShardVolatile::DistributedWriteThenDropTable >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt4Seconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Microseconds ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors [GOOD] Test command err: 2025-07-08T11:59:56.293415Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00121f/r3tmp/tmpqp0Q3K/pdisk_1.dat 2025-07-08T11:59:56.445832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T11:59:56.464846Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:56.505431Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:56.505464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:56.516193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:59:56.605433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:59:56.621793Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:603:2519] 2025-07-08T11:59:56.621871Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:56.631156Z 
node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:56.631198Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T11:59:56.631358Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T11:59:56.631369Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T11:59:56.631375Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T11:59:56.631440Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T11:59:56.631466Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T11:59:56.631480Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:619:2519] in generation 1 2025-07-08T11:59:56.645163Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T11:59:56.648920Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T11:59:56.653965Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T11:59:56.653998Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:621:2529] 2025-07-08T11:59:56.654003Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T11:59:56.654008Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T11:59:56.654013Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:56.654169Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T11:59:56.654191Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T11:59:56.654205Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:56.654211Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:56.654219Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T11:59:56.654224Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:56.654319Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:599:2516], serverId# [1:610:2523], sessionId# [0:0:0] 2025-07-08T11:59:56.654355Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T11:59:56.654400Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T11:59:56.654416Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T11:59:56.654683Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T11:59:56.665140Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T11:59:56.665177Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T11:59:56.809521Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:640:2542], serverId# [1:642:2544], 
sessionId# [0:0:0] 2025-07-08T11:59:56.810354Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-07-08T11:59:56.810374Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:56.810504Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:56.810513Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-07-08T11:59:56.810523Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-07-08T11:59:56.810585Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-07-08T11:59:56.810615Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-07-08T11:59:56.810706Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:56.810717Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-07-08T11:59:56.811058Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-07-08T11:59:56.811158Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:56.811379Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-07-08T11:59:56.811386Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:56.811523Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-07-08T11:59:56.811533Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:56.811716Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:56.811726Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T11:59:56.811732Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-07-08T11:59:56.811746Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:361:2356], exec latency: 0 ms, propose latency: 0 ms 2025-07-08T11:59:56.811754Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-07-08T11:59:56.811763Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:56.812518Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T11:59:56.812725Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-07-08T11:59:56.812733Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-07-08T11:59:56.812843Z node 1 
:TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-07-08T11:59:56.860368Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715658. Ctx: { TraceId: 01jzmygbcf6djbx7yt86h4jq3y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTAzN2JiZjItNWM1ZDNkYzEtYzFkZWQ1ZTgtNjdkNTBkMzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:59:56.861561Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:685:2577], serverId# [1:686:2578], sessionId# [0:0:0] 2025-07-08T11:59:56.861629Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T11:59:56.882404Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T11:59:56.882454Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:56.883434Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:693:2584], serverId# [1:694:2585], sessionId# [0:0:0] 2025-07-08T11:59:56.883472Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-07-08T11:59:56.903861Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-07-08T11:59:56.903888Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:56.903942Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:693:2584], serverId# [1:694:2585], sessionId# [0:0:0] 2025-07-08T11:59:56.904181Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T11:59:56.904275Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T11:59:56.904328Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:56.904335Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-07-08T11:59:56.904343Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for WaitForStreamClearance 2025-07-08T11:59:56.904381Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-07-08T11:59:56.904391Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:56.904534Z node 1 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 2025-07-08T11:59:56.904593Z node 1 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715659, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-07-08T11:59:56.904620Z node 1 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715659, PendingAcks: 0 2025-07-08T11:59:56.904627Z node 1 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 0 2025-07-08T11:59:56.904725Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-07-08T11:59:56.904732Z node 1 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715659, at: 72075186224037888 2025-07-08T11:59: ... 
37968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:59:57.884937Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:59:57.897406Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:604:2520] 2025-07-08T11:59:57.897465Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:57.906327Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:57.906368Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T11:59:57.906543Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T11:59:57.906552Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T11:59:57.906560Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T11:59:57.906609Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T11:59:57.906631Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T11:59:57.906641Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:620:2520] in generation 1 2025-07-08T11:59:57.917152Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T11:59:57.917188Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T11:59:57.917219Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T11:59:57.917236Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:622:2530] 2025-07-08T11:59:57.917241Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T11:59:57.917246Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T11:59:57.917252Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:57.917376Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T11:59:57.917401Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T11:59:57.917513Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:57.917521Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:57.917531Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T11:59:57.917536Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:57.917550Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:600:2517], serverId# [2:611:2524], sessionId# [0:0:0] 2025-07-08T11:59:57.917581Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T11:59:57.917643Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T11:59:57.917663Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T11:59:57.917991Z node 2 :TX_DATASHARD DEBUG: 
Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T11:59:57.928344Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T11:59:57.928387Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T11:59:58.088284Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:640:2542], serverId# [2:642:2544], sessionId# [0:0:0] 2025-07-08T11:59:58.088472Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-07-08T11:59:58.088482Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:58.088517Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:58.088525Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-07-08T11:59:58.088535Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-07-08T11:59:58.088606Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-07-08T11:59:58.088638Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-07-08T11:59:58.088802Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:58.088817Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-07-08T11:59:58.088914Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-07-08T11:59:58.089020Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:58.089360Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-07-08T11:59:58.089367Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:58.089456Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-07-08T11:59:58.089464Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:58.089630Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:58.089637Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T11:59:58.089643Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-07-08T11:59:58.089658Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:362:2357], exec latency: 0 ms, propose latency: 0 ms 2025-07-08T11:59:58.089667Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-07-08T11:59:58.089676Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] 
Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:58.089830Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T11:59:58.090212Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-07-08T11:59:58.090221Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-07-08T11:59:58.090277Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-07-08T11:59:58.090986Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:676:2570], serverId# [2:677:2571], sessionId# [0:0:0] 2025-07-08T11:59:58.091018Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-07-08T11:59:58.113207Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-07-08T11:59:58.113241Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:58.113335Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:676:2570], serverId# [2:677:2571], sessionId# [0:0:0] 2025-07-08T11:59:58.113816Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:682:2576], serverId# [2:683:2577], sessionId# [0:0:0] 2025-07-08T11:59:58.113850Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-07-08T11:59:58.113904Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-07-08T11:59:58.113912Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:58.113933Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:682:2576], serverId# [2:683:2577], sessionId# [0:0:0] 2025-07-08T11:59:58.114104Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:687:2581], serverId# [2:688:2582], sessionId# [0:0:0] 2025-07-08T11:59:58.114118Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-07-08T11:59:58.114131Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-07-08T11:59:58.114135Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:58.114156Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:687:2581], serverId# [2:688:2582], sessionId# [0:0:0] 2025-07-08T11:59:58.114319Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:692:2586], serverId# [2:693:2587], sessionId# [0:0:0] 2025-07-08T11:59:58.114332Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-07-08T11:59:58.114344Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-07-08T11:59:58.114348Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:58.114365Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:692:2586], serverId# [2:693:2587], sessionId# [0:0:0] 2025-07-08T11:59:58.114514Z node 2 :TX_DATASHARD DEBUG: Server 
connected at leader tablet# 72075186224037888, clientId# [2:697:2591], serverId# [2:698:2592], sessionId# [0:0:0] 2025-07-08T11:59:58.114526Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-07-08T11:59:58.114547Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-07-08T11:59:58.114551Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:58.114570Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:697:2591], serverId# [2:698:2592], sessionId# [0:0:0] 2025-07-08T11:59:58.114724Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:702:2596], serverId# [2:703:2597], sessionId# [0:0:0] 2025-07-08T11:59:58.114737Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-07-08T11:59:58.114750Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-07-08T11:59:58.114754Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:58.114773Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:702:2596], serverId# [2:703:2597], sessionId# [0:0:0] >> DistributedEraseTests::ConditionalEraseRowsShouldSuccessOnShardedIndex [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldNotEraseModifiedRows >> TSchemeShardTest::MkRmDir >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMicroSeconds >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Milliseconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldEraseOnUint32 >> TSchemeShardTest::MkRmDir [GOOD] >> TSchemeShardTest::PathName >> TSchemeShardTest::InitRootAgain ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 [GOOD] Test command err: 2025-07-08T11:59:57.262530Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001221/r3tmp/tmpunOFSj/pdisk_1.dat 2025-07-08T11:59:57.404013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T11:59:57.422449Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:57.457464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:57.457502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:57.469410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:59:57.547092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:59:57.566160Z node 1 :TX_DATASHARD INFO: 
TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:603:2519] 2025-07-08T11:59:57.566251Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:57.575065Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:57.575107Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T11:59:57.575259Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T11:59:57.575268Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T11:59:57.575274Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T11:59:57.575339Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T11:59:57.575356Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T11:59:57.575367Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:619:2519] in generation 1 2025-07-08T11:59:57.585712Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T11:59:57.594636Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T11:59:57.594726Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T11:59:57.594769Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:621:2529] 2025-07-08T11:59:57.594775Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T11:59:57.594781Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T11:59:57.594787Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:57.594971Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T11:59:57.594996Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T11:59:57.595014Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:57.595021Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:57.595031Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T11:59:57.595036Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:57.595153Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:599:2516], serverId# [1:610:2523], sessionId# [0:0:0] 2025-07-08T11:59:57.595198Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T11:59:57.595263Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T11:59:57.595283Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T11:59:57.595597Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T11:59:57.606026Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T11:59:57.606078Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in 
state WaitScheme 2025-07-08T11:59:57.757489Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:640:2542], serverId# [1:642:2544], sessionId# [0:0:0] 2025-07-08T11:59:57.758408Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-07-08T11:59:57.758430Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:57.758576Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:57.758586Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-07-08T11:59:57.758598Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-07-08T11:59:57.758669Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-07-08T11:59:57.758708Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-07-08T11:59:57.758838Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:57.758853Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-07-08T11:59:57.759232Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-07-08T11:59:57.759354Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:57.759616Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-07-08T11:59:57.759625Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:57.759770Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-07-08T11:59:57.759780Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:57.759956Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:57.759963Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T11:59:57.759969Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-07-08T11:59:57.759983Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:361:2356], exec latency: 0 ms, propose latency: 0 ms 2025-07-08T11:59:57.759992Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-07-08T11:59:57.760000Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:57.760747Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T11:59:57.760934Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 
state Ready 2025-07-08T11:59:57.760941Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-07-08T11:59:57.761172Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-07-08T11:59:57.854047Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715658. Ctx: { TraceId: 01jzmygca2b09rhb41s5a13v9t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWM4OTRmMzgtZjMwMGZlOGEtOTE4Yzg5YjktYjc4OTE4MGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:59:57.855249Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:685:2577], serverId# [1:686:2578], sessionId# [0:0:0] 2025-07-08T11:59:57.855331Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T11:59:57.877176Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T11:59:57.877228Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:57.878294Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:693:2584], serverId# [1:694:2585], sessionId# [0:0:0] 2025-07-08T11:59:57.878540Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-07-08T11:59:57.901124Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-07-08T11:59:57.901149Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:57.901207Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-07-08T11:59:57.901215Z node 1 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-07-08T11:59:57.901289Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:57.901299Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:57.901306Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T11:59:57.901316Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:57.901334Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:693:2584], serverId# [1:694:2585], sessionId# [0:0:0] 2025-07-08T11:59:57.901546Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T11:59:57.901621Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T11:59:57.901671Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:57.901678Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-07-08T11:59:57.901683Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for WaitForStreamClearance 2025-07-08T11:59:57.901718Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-07-08T11:59:57.901725Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:57.901859Z node 1 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 
72075186224037888, TxId: 281474976715659, MessageQuota: 1 2025 ... 2025-07-08T11:59:58.875979Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:58.876039Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T11:59:58.876219Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T11:59:58.876230Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T11:59:58.876238Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T11:59:58.876288Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T11:59:58.876313Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T11:59:58.876326Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:620:2520] in generation 1 2025-07-08T11:59:58.887205Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T11:59:58.887243Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T11:59:58.887275Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T11:59:58.887290Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:622:2530] 2025-07-08T11:59:58.887295Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T11:59:58.887300Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T11:59:58.887305Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:58.887712Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T11:59:58.887742Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T11:59:58.887854Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:58.887862Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:58.887871Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T11:59:58.887876Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:58.887891Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:600:2517], serverId# [2:611:2524], sessionId# [0:0:0] 2025-07-08T11:59:58.887921Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T11:59:58.887983Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T11:59:58.888016Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T11:59:58.888653Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T11:59:58.900360Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T11:59:58.900412Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T11:59:59.057983Z node 2 :TX_DATASHARD DEBUG: Server 
connected at leader tablet# 72075186224037888, clientId# [2:640:2542], serverId# [2:642:2544], sessionId# [0:0:0] 2025-07-08T11:59:59.058239Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-07-08T11:59:59.058259Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:59.058312Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:59.058322Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-07-08T11:59:59.058337Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-07-08T11:59:59.058428Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-07-08T11:59:59.058469Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-07-08T11:59:59.058697Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:59.058719Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-07-08T11:59:59.058844Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-07-08T11:59:59.058956Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:59.059588Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-07-08T11:59:59.059606Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:59.059770Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-07-08T11:59:59.059786Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:59.060057Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:59.060069Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T11:59:59.060076Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-07-08T11:59:59.060095Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:362:2357], exec latency: 0 ms, propose latency: 0 ms 2025-07-08T11:59:59.060107Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-07-08T11:59:59.060117Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:59.060351Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T11:59:59.060796Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-07-08T11:59:59.060808Z node 2 :TX_DATASHARD DEBUG: 
72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-07-08T11:59:59.060866Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-07-08T11:59:59.082686Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715658. Ctx: { TraceId: 01jzmygdjpb00psptb62n1nbr5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDc4YzI4NGEtMjg1ODE5YTctYTkzYmRhYy03MWEzNjA0OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:59:59.082885Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:685:2577], serverId# [2:686:2578], sessionId# [0:0:0] 2025-07-08T11:59:59.082960Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T11:59:59.104074Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T11:59:59.104141Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:59.105360Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:693:2584], serverId# [2:694:2585], sessionId# [0:0:0] 2025-07-08T11:59:59.105655Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-07-08T11:59:59.126471Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-07-08T11:59:59.126512Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:59.126603Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-07-08T11:59:59.126614Z node 2 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-07-08T11:59:59.126702Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:59.126714Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:59.126724Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T11:59:59.126740Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:59.126761Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:693:2584], serverId# [2:694:2585], sessionId# [0:0:0] 2025-07-08T11:59:59.127022Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T11:59:59.127130Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T11:59:59.127174Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:59.127179Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-07-08T11:59:59.127187Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for WaitForStreamClearance 2025-07-08T11:59:59.127236Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-07-08T11:59:59.127245Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:59.127467Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 
2025-07-08T11:59:59.127530Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715659, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-07-08T11:59:59.127554Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715659, PendingAcks: 0 2025-07-08T11:59:59.127560Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 0 2025-07-08T11:59:59.127640Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-07-08T11:59:59.127645Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715659, at: 72075186224037888 2025-07-08T11:59:59.127667Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:59.127672Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-07-08T11:59:59.127679Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for ReadTableScan 2025-07-08T11:59:59.127714Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:59.127722Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:59.127729Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds [GOOD] Test command err: 2025-07-08T11:59:56.786459Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0011f5/r3tmp/tmp2aq6e8/pdisk_1.dat 2025-07-08T11:59:56.929876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T11:59:56.949737Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:56.985446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:56.985482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:56.997410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:59:57.082766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:59:57.103159Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:603:2519] 2025-07-08T11:59:57.103249Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:57.118009Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:57.118052Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T11:59:57.118205Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T11:59:57.118216Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T11:59:57.118223Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 
72075186224037888 2025-07-08T11:59:57.118284Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T11:59:57.118308Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T11:59:57.118322Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:619:2519] in generation 1 2025-07-08T11:59:57.128619Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T11:59:57.137030Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T11:59:57.137091Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T11:59:57.137113Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:621:2529] 2025-07-08T11:59:57.137119Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T11:59:57.137124Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T11:59:57.137129Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:57.137283Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T11:59:57.137303Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T11:59:57.137319Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:57.137325Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:57.137333Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T11:59:57.137338Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:57.137428Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:599:2516], serverId# [1:610:2523], sessionId# [0:0:0] 2025-07-08T11:59:57.137463Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T11:59:57.137515Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T11:59:57.137532Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T11:59:57.137815Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T11:59:57.149158Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T11:59:57.149194Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T11:59:57.322747Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:640:2542], serverId# [1:642:2544], sessionId# [0:0:0] 2025-07-08T11:59:57.323681Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-07-08T11:59:57.323702Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:57.323845Z node 1 
:TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:57.323857Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-07-08T11:59:57.323868Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-07-08T11:59:57.323931Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-07-08T11:59:57.323965Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-07-08T11:59:57.324084Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:57.324099Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-07-08T11:59:57.324487Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-07-08T11:59:57.324589Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:57.324837Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-07-08T11:59:57.324846Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:57.325044Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-07-08T11:59:57.325056Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:57.325258Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:57.325267Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T11:59:57.325273Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-07-08T11:59:57.325287Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:361:2356], exec latency: 0 ms, propose latency: 0 ms 2025-07-08T11:59:57.325296Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-07-08T11:59:57.325305Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:57.325978Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T11:59:57.326158Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-07-08T11:59:57.326165Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-07-08T11:59:57.326267Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-07-08T11:59:57.414479Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715658. Ctx: { TraceId: 01jzmygbwf0b0pry5vwv9724h6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmU2MjgyMjItMmE3YzA5ZTItODFjNjdiM2YtMTU0MDk2Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T11:59:57.415586Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:685:2577], serverId# [1:686:2578], sessionId# [0:0:0] 2025-07-08T11:59:57.415671Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T11:59:57.441208Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T11:59:57.441275Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:57.442482Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:693:2584], serverId# [1:694:2585], sessionId# [0:0:0] 2025-07-08T11:59:57.442769Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-07-08T11:59:57.463299Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-07-08T11:59:57.463328Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:57.463392Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-07-08T11:59:57.463402Z node 1 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-07-08T11:59:57.463481Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:57.463489Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:57.463499Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T11:59:57.463509Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:57.463527Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:693:2584], serverId# [1:694:2585], sessionId# [0:0:0] 2025-07-08T11:59:57.463771Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T11:59:57.463867Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T11:59:57.463920Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:57.463925Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-07-08T11:59:57.463932Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for WaitForStreamClearance 2025-07-08T11:59:57.463974Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-07-08T11:59:57.463981Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:57.464131Z node 1 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 2025 ... 
2025-07-08T11:59:58.568201Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:58.568245Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T11:59:58.568411Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T11:59:58.568420Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T11:59:58.568428Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T11:59:58.568471Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T11:59:58.568491Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T11:59:58.568503Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:620:2520] in generation 1 2025-07-08T11:59:58.581259Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T11:59:58.581289Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T11:59:58.581317Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T11:59:58.581331Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:622:2530] 2025-07-08T11:59:58.581336Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T11:59:58.581341Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T11:59:58.581346Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:58.581451Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T11:59:58.581471Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T11:59:58.581557Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:58.581564Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:58.581570Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T11:59:58.581576Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:58.581587Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:600:2517], serverId# [2:611:2524], sessionId# [0:0:0] 2025-07-08T11:59:58.581613Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T11:59:58.581665Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T11:59:58.581681Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T11:59:58.581982Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T11:59:58.593720Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T11:59:58.593762Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T11:59:58.757677Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:640:2542], 
serverId# [2:642:2544], sessionId# [0:0:0] 2025-07-08T11:59:58.757850Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-07-08T11:59:58.757861Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:58.757894Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:58.757902Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-07-08T11:59:58.757910Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-07-08T11:59:58.757971Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-07-08T11:59:58.757998Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-07-08T11:59:58.758139Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:58.758153Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-07-08T11:59:58.758242Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-07-08T11:59:58.758321Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:58.758655Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-07-08T11:59:58.758665Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:58.758759Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-07-08T11:59:58.758769Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:58.758943Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:58.758952Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T11:59:58.758957Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-07-08T11:59:58.758973Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:362:2357], exec latency: 0 ms, propose latency: 0 ms 2025-07-08T11:59:58.758982Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-07-08T11:59:58.758991Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:58.759151Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T11:59:58.759553Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-07-08T11:59:58.759566Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 
2025-07-08T11:59:58.759626Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-07-08T11:59:58.772120Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715658. Ctx: { TraceId: 01jzmygd98d464vaz8j0yz7p32, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODMwYWNkZDUtYWNjY2ZkYTMtMzkwZDdmYWYtNDg1N2NkZjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:59:58.772282Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:685:2577], serverId# [2:686:2578], sessionId# [0:0:0] 2025-07-08T11:59:58.772341Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T11:59:58.797161Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T11:59:58.797207Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:58.798066Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:693:2584], serverId# [2:694:2585], sessionId# [0:0:0] 2025-07-08T11:59:58.798303Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-07-08T11:59:58.825292Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-07-08T11:59:58.825325Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:58.825403Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-07-08T11:59:58.825415Z node 2 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-07-08T11:59:58.825489Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:58.825499Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:58.825508Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T11:59:58.825521Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:58.825540Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:693:2584], serverId# [2:694:2585], sessionId# [0:0:0] 2025-07-08T11:59:58.825765Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T11:59:58.825848Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T11:59:58.825885Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:58.825890Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-07-08T11:59:58.825898Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for WaitForStreamClearance 2025-07-08T11:59:58.825938Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-07-08T11:59:58.825945Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:58.826103Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 2025-07-08T11:59:58.826156Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 
72075186224037888, TxId: 281474976715659, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-07-08T11:59:58.826178Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715659, PendingAcks: 0 2025-07-08T11:59:58.826184Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 0 2025-07-08T11:59:58.826261Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-07-08T11:59:58.826267Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715659, at: 72075186224037888 2025-07-08T11:59:58.826286Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:58.826291Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-07-08T11:59:58.826296Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for ReadTableScan 2025-07-08T11:59:58.826323Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:58.826330Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:58.826338Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds [GOOD] >> TSchemeShardTest::PathName [GOOD] >> TSchemeShardTest::PathName_SetLocale >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Microseconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate |66.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |66.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |66.4%| [LD] {RESULT} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut >> BasicUsage::ReadSessionCorrectClose [GOOD] >> BasicUsage::ConflictingWrites >> TSchemeShardTest::PathName_SetLocale [GOOD] >> TSchemeShardTest::ModifyACL ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds [GOOD] Test command err: 2025-07-08T11:59:56.104374Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00123d/r3tmp/tmpjU1Ekm/pdisk_1.dat 2025-07-08T11:59:56.228555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T11:59:56.247574Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:56.293599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:56.293631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:56.305423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:59:56.388799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:59:56.412350Z node 1 :TX_DATASHARD INFO: 
TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:603:2519] 2025-07-08T11:59:56.412438Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:56.422420Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:56.422458Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T11:59:56.422605Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T11:59:56.422614Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T11:59:56.422620Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T11:59:56.422676Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T11:59:56.422691Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T11:59:56.422701Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:619:2519] in generation 1 2025-07-08T11:59:56.435527Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T11:59:56.439711Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T11:59:56.439775Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T11:59:56.439797Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:621:2529] 2025-07-08T11:59:56.439802Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T11:59:56.439807Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T11:59:56.439812Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:56.440229Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T11:59:56.440258Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T11:59:56.440277Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:56.440284Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:56.440291Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T11:59:56.440297Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:56.440409Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:599:2516], serverId# [1:610:2523], sessionId# [0:0:0] 2025-07-08T11:59:56.440449Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T11:59:56.440497Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T11:59:56.440514Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T11:59:56.440797Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T11:59:56.451098Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T11:59:56.451134Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in 
state WaitScheme 2025-07-08T11:59:56.619032Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:640:2542], serverId# [1:642:2544], sessionId# [0:0:0] 2025-07-08T11:59:56.619855Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-07-08T11:59:56.619867Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:56.619972Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:56.619992Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-07-08T11:59:56.620002Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-07-08T11:59:56.620055Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-07-08T11:59:56.620085Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-07-08T11:59:56.620170Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:56.620182Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-07-08T11:59:56.620565Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-07-08T11:59:56.620660Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:56.620886Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-07-08T11:59:56.620892Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:56.621057Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-07-08T11:59:56.621070Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:56.621276Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:56.621286Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T11:59:56.621292Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-07-08T11:59:56.621308Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:361:2356], exec latency: 0 ms, propose latency: 0 ms 2025-07-08T11:59:56.621316Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-07-08T11:59:56.621326Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:56.622115Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T11:59:56.622314Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 
state Ready 2025-07-08T11:59:56.622321Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-07-08T11:59:56.622441Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-07-08T11:59:56.673080Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715658. Ctx: { TraceId: 01jzmygb6fdfzdtzq19aa0hm46, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTIzMDJmMzktMmI5YjdhYmUtZWMzNmQ0YzYtMjAwOWRjNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:59:56.674086Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:685:2577], serverId# [1:686:2578], sessionId# [0:0:0] 2025-07-08T11:59:56.674159Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T11:59:56.694933Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T11:59:56.694992Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:56.696081Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:693:2584], serverId# [1:694:2585], sessionId# [0:0:0] 2025-07-08T11:59:56.696358Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-07-08T11:59:56.725246Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-07-08T11:59:56.725278Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:56.725338Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-07-08T11:59:56.725346Z node 1 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-07-08T11:59:56.725422Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:56.725430Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:56.725438Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T11:59:56.725452Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:56.725471Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:693:2584], serverId# [1:694:2585], sessionId# [0:0:0] 2025-07-08T11:59:56.725685Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T11:59:56.725760Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T11:59:56.725802Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:56.725807Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-07-08T11:59:56.725813Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for WaitForStreamClearance 2025-07-08T11:59:56.725849Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-07-08T11:59:56.725856Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:56.725980Z node 1 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 
72075186224037888, TxId: 281474976715659, MessageQuota: 1 2025 ... 2025-07-08T11:59:59.163077Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:59.163118Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T11:59:59.163281Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T11:59:59.163290Z node 3 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T11:59:59.163297Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T11:59:59.163345Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T11:59:59.163365Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T11:59:59.163376Z node 3 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [3:619:2519] in generation 1 2025-07-08T11:59:59.175089Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T11:59:59.175117Z node 3 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T11:59:59.175145Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T11:59:59.175157Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [3:621:2529] 2025-07-08T11:59:59.175162Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T11:59:59.175166Z node 3 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T11:59:59.175170Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:59.175274Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T11:59:59.175294Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T11:59:59.175309Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:59.175315Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:59.175322Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T11:59:59.175327Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:59.175411Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:599:2516], serverId# [3:610:2523], sessionId# [0:0:0] 2025-07-08T11:59:59.175444Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T11:59:59.175490Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T11:59:59.175506Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T11:59:59.175774Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T11:59:59.186052Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T11:59:59.186087Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T11:59:59.353653Z node 3 :TX_DATASHARD DEBUG: Server 
connected at leader tablet# 72075186224037888, clientId# [3:640:2542], serverId# [3:642:2544], sessionId# [0:0:0] 2025-07-08T11:59:59.353748Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-07-08T11:59:59.353757Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:59.353964Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:59.353973Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-07-08T11:59:59.353982Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-07-08T11:59:59.354039Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-07-08T11:59:59.354068Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-07-08T11:59:59.354139Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:59.354153Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-07-08T11:59:59.354237Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-07-08T11:59:59.354310Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:59.354583Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-07-08T11:59:59.354589Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:59.354712Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-07-08T11:59:59.354721Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:59.354850Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:59.354857Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T11:59:59.354863Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-07-08T11:59:59.354876Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:361:2356], exec latency: 0 ms, propose latency: 0 ms 2025-07-08T11:59:59.354884Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-07-08T11:59:59.354893Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:59.355092Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T11:59:59.355359Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-07-08T11:59:59.355389Z 
node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-07-08T11:59:59.355395Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-07-08T11:59:59.391581Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715658. Ctx: { TraceId: 01jzmygdvw32d94xrb59j9nzhm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Y2QyOGJjYjQtMzYxNDBlMC05NjEyYTU0My0zMWNiMWNmOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:59:59.391732Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:685:2577], serverId# [3:686:2578], sessionId# [0:0:0] 2025-07-08T11:59:59.391794Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T11:59:59.416503Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T11:59:59.416556Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:59.417554Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:693:2584], serverId# [3:694:2585], sessionId# [0:0:0] 2025-07-08T11:59:59.417806Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-07-08T11:59:59.438751Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-07-08T11:59:59.438777Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:59.438843Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-07-08T11:59:59.438852Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-07-08T11:59:59.438938Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:59.438945Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:59.438953Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T11:59:59.438966Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:59.438982Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:693:2584], serverId# [3:694:2585], sessionId# [0:0:0] 2025-07-08T11:59:59.439203Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T11:59:59.439279Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T11:59:59.439309Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:59.439314Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-07-08T11:59:59.439320Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for WaitForStreamClearance 2025-07-08T11:59:59.439358Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-07-08T11:59:59.439364Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:59.439464Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 2025-07-08T11:59:59.439518Z node 3 
:TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715659, Size: 37, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-07-08T11:59:59.439540Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715659, PendingAcks: 0 2025-07-08T11:59:59.439545Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 0 2025-07-08T11:59:59.439615Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-07-08T11:59:59.439619Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715659, at: 72075186224037888 2025-07-08T11:59:59.439634Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:59.439639Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-07-08T11:59:59.439643Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for ReadTableScan 2025-07-08T11:59:59.439668Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:59.439674Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:59.439680Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> DataShardVolatile::DistributedWriteThenDropTable [GOOD] >> DataShardVolatile::DistributedWriteThenCopyTable >> TSchemeShardTest::InitRootAgain [GOOD] >> TSchemeShardTest::InitRootWithOwner >> Viewer::SelectStringWithNoBase64Encoding >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp [GOOD] >> TSchemeShardTest::ModifyACL [GOOD] >> TSchemeShardTest::NameFormat >> TSchemeShardTest::InitRootWithOwner [GOOD] >> TSchemeShardTest::DropTableTwice >> TSchemeShardTest::AlterTableDropColumnSplitThenReCreate >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMicroSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDate32 >> TSchemeShardTest::NameFormat [GOOD] >> TSchemeShardTest::ParallelCreateTable >> DistributedEraseTests::ConditionalEraseRowsShouldEraseOnUint32 [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSchemeTx >> Viewer::JsonAutocompleteSimilarDatabaseName ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp [GOOD] Test command err: 2025-07-08T11:59:56.823370Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0011ea/r3tmp/tmpGWlfVO/pdisk_1.dat 2025-07-08T11:59:56.948282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T11:59:56.965168Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:56.997360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:56.997398Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:57.008125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:59:57.089284Z 
node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:59:57.106226Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:603:2519] 2025-07-08T11:59:57.106327Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:57.114623Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:57.114662Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T11:59:57.114821Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T11:59:57.114832Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T11:59:57.114839Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T11:59:57.114902Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T11:59:57.114920Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T11:59:57.114933Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:619:2519] in generation 1 2025-07-08T11:59:57.125261Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T11:59:57.129661Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T11:59:57.129709Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T11:59:57.129727Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:621:2529] 2025-07-08T11:59:57.129732Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T11:59:57.129737Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T11:59:57.129742Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:57.129878Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T11:59:57.129900Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T11:59:57.129917Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:57.129923Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:57.129932Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T11:59:57.129936Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:57.130043Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:599:2516], serverId# [1:610:2523], sessionId# [0:0:0] 2025-07-08T11:59:57.130093Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T11:59:57.130154Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T11:59:57.130174Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T11:59:57.130457Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 
72075186224037888 2025-07-08T11:59:57.140670Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T11:59:57.140702Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T11:59:57.301544Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:640:2542], serverId# [1:642:2544], sessionId# [0:0:0] 2025-07-08T11:59:57.302414Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-07-08T11:59:57.302436Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:57.302589Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:57.302600Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-07-08T11:59:57.302612Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-07-08T11:59:57.302684Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-07-08T11:59:57.302720Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-07-08T11:59:57.302823Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:57.302841Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-07-08T11:59:57.303266Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-07-08T11:59:57.303386Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:57.303642Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-07-08T11:59:57.303652Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:57.303807Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-07-08T11:59:57.303818Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:57.304056Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:57.304068Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T11:59:57.304074Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-07-08T11:59:57.304092Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:361:2356], exec latency: 0 ms, propose latency: 0 ms 2025-07-08T11:59:57.304102Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-07-08T11:59:57.304113Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:57.305052Z node 1 :TX_DATASHARD DEBUG: 
Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T11:59:57.305326Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-07-08T11:59:57.305338Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-07-08T11:59:57.305706Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-07-08T11:59:57.400523Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715658. Ctx: { TraceId: 01jzmygbvv32w85acvez2vgany, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWM2ODYwMWMtOWI5MzJiZWEtNzRiNWJhZGEtMWVhNjVjMmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T11:59:57.401402Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:685:2577], serverId# [1:686:2578], sessionId# [0:0:0] 2025-07-08T11:59:57.401476Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T11:59:57.422776Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T11:59:57.422828Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:57.423821Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:693:2584], serverId# [1:694:2585], sessionId# [0:0:0] 2025-07-08T11:59:57.424099Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-07-08T11:59:57.451493Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-07-08T11:59:57.451524Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:57.451588Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-07-08T11:59:57.451597Z node 1 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-07-08T11:59:57.451691Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:57.451701Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:57.451711Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T11:59:57.451723Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:57.451745Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:693:2584], serverId# [1:694:2585], sessionId# [0:0:0] 2025-07-08T11:59:57.452018Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T11:59:57.452116Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T11:59:57.452171Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:57.452176Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-07-08T11:59:57.452184Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for WaitForStreamClearance 2025-07-08T11:59:57.452230Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 
active planned 0 immediate 1 planned 0 2025-07-08T11:59:57.452237Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:57.452364Z node 1 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 2025 ... 2025-07-08T12:00:00.140102Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:00.140147Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:00.140317Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T12:00:00.140327Z node 3 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T12:00:00.140334Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T12:00:00.140381Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:00.140404Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:00.140416Z node 3 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [3:619:2519] in generation 1 2025-07-08T12:00:00.153220Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:00.153257Z node 3 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T12:00:00.153290Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:00.153306Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [3:621:2529] 2025-07-08T12:00:00.153311Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:00.153315Z node 3 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T12:00:00.153320Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:00.153448Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T12:00:00.153475Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T12:00:00.153494Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:00.153500Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:00.153509Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:00.153515Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:00.153621Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:599:2516], serverId# [3:610:2523], sessionId# [0:0:0] 2025-07-08T12:00:00.153657Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:00.153714Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T12:00:00.153732Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T12:00:00.154054Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:00.164899Z node 3 :TX_DATASHARD DEBUG: 
TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:00.164968Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T12:00:00.338764Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:640:2542], serverId# [3:642:2544], sessionId# [0:0:0] 2025-07-08T12:00:00.338888Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-07-08T12:00:00.338898Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:00.339168Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:00.339181Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-07-08T12:00:00.339192Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-07-08T12:00:00.339258Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-07-08T12:00:00.339293Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-07-08T12:00:00.339391Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:00.339406Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-07-08T12:00:00.339500Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-07-08T12:00:00.339589Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:00.339943Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-07-08T12:00:00.339954Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:00.340103Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-07-08T12:00:00.340115Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:00.340271Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:00.340280Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:00.340285Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-07-08T12:00:00.340303Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:361:2356], exec latency: 0 ms, propose latency: 0 ms 2025-07-08T12:00:00.340314Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-07-08T12:00:00.340324Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:00.340569Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, 
outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:00.340905Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-07-08T12:00:00.340944Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-07-08T12:00:00.340974Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-07-08T12:00:00.365502Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715658. Ctx: { TraceId: 01jzmygetp8epyqk3rxtq38f0a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NWRiOWRiNTgtYWRkMDA4OTAtYmI3MWIzMmMtMTcxMjc5ODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:00.365686Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:685:2577], serverId# [3:686:2578], sessionId# [0:0:0] 2025-07-08T12:00:00.365753Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:00.387296Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:00.387356Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:00.388532Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:693:2584], serverId# [3:694:2585], sessionId# [0:0:0] 2025-07-08T12:00:00.388891Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-07-08T12:00:00.409681Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-07-08T12:00:00.409721Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:00.409799Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-07-08T12:00:00.409808Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-07-08T12:00:00.409913Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:00.409924Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:00.409933Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:00.409948Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:00.409966Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:693:2584], serverId# [3:694:2585], sessionId# [0:0:0] 2025-07-08T12:00:00.410218Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:00.410312Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:00.410356Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:00.410363Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:00.410370Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for WaitForStreamClearance 2025-07-08T12:00:00.410418Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:00.410426Z node 3 
:TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:00.410562Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 2025-07-08T12:00:00.410655Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715659, Size: 48, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-07-08T12:00:00.410690Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715659, PendingAcks: 0 2025-07-08T12:00:00.410697Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 0 2025-07-08T12:00:00.410791Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-07-08T12:00:00.410797Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715659, at: 72075186224037888 2025-07-08T12:00:00.410823Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:00.410829Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:00.410835Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for ReadTableScan 2025-07-08T12:00:00.410867Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:00.410877Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:00.410885Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> TSchemeShardTest::DropTableTwice [GOOD] >> TSchemeShardTest::ParallelCreateTable [GOOD] >> TSchemeShardTest::IgnoreUserColumnIds >> TSchemeShardTest::ParallelCreateSameTable >> Viewer::FuzzySearcherLimit3OutOf4 [GOOD] >> Viewer::FuzzySearcherLimit4OutOf4 [GOOD] >> Viewer::FuzzySearcherLongWord [GOOD] >> Viewer::FuzzySearcherPriority [GOOD] >> Viewer::JsonAutocompleteColumns >> Viewer::TabletMerging >> Viewer::JsonAutocompleteStartOfDatabaseName >> DistributedEraseTests::ConditionalEraseRowsShouldNotEraseModifiedRows [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows >> TSchemeShardCheckProposeSize::CopyTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadDuplicate [GOOD] Test command err: 2025-07-08T11:58:53.757552Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:58:53.761165Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:58:53.761221Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:58:53.761963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:58:53.762026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:53.762061Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:53.762084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:53.762102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:53.762124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:58:53.762141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:53.762158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:53.762174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:58:53.762190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.762208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:58:53.762229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:58:53.768042Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:58:53.768233Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:58:53.768245Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:58:53.768289Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:53.768340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:58:53.768357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:58:53.768363Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:58:53.768373Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:58:53.768382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:58:53.768389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:58:53.768394Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:58:53.768417Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:53.768426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:58:53.768434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:58:53.768438Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:58:53.768448Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:58:53.768455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:58:53.768462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:58:53.768467Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:58:53.768475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:58:53.768482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:58:53.768487Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:58:53.768510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:58:53.768517Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:58:53.768521Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:58:53.768543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:58:53.768551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:58:53.768556Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:58:53.768569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:58:53.768576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.768581Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.768589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:58:53.768597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:58:53.768603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:58:53.768607Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:58:53.768653Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=16; 2025-07-08T11:58:53.768662Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=5; 2025-07-08T11:58:53.768670Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2025-07-08T11:58:53.768680Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=7; 2025-07-08T11:58:53.768690Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:58:53.768703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:58:53.768711Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:58:53.768716Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:58:53.768729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:58:53.768734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;ev ... ode 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10323:12335];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=49; 2025-07-08T11:59:56.863713Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10323:12335];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=10;merger=0;interval_id=49; 2025-07-08T11:59:56.863724Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10323:12335];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-07-08T11:59:56.863763Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10323:12335];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-07-08T11:59:56.863773Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10323:12335];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=1;count=10;finished=1; 2025-07-08T11:59:56.863782Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10323:12335];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:207;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-07-08T11:59:56.864332Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10323:12335];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T11:59:56.864367Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10323:12335];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:1;records_count:10;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-07-08T11:59:56.864374Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10323:12335];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=0;count=0;finished=1; 
2025-07-08T11:59:56.864385Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10323:12335];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:238;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=10; 2025-07-08T11:59:56.864401Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10323:12335];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:258;stage=data_format;batch_size=80;num_rows=10;batch_columns=timestamp; 2025-07-08T11:59:56.864463Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10323:12335];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:10319:12331];bytes=80;rows=10;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; 2025-07-08T11:59:56.864482Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10323:12335];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:278;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-07-08T11:59:56.864496Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10323:12335];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-07-08T11:59:56.864505Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10323:12335];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-07-08T11:59:56.864581Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10323:12335];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T11:59:56.864593Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10323:12335];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-07-08T11:59:56.864600Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10323:12335];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-07-08T11:59:56.864607Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:10323:12335] finished for tablet 9437184 2025-07-08T11:59:56.864695Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:10323:12335];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:425;event=scan_finish;compute_actor_id=[1:10319:12331];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.024},{"events":["l_bootstrap"],"t":0.049},{"events":["f_processing","f_task_result"],"t":0.05},{"events":["l_task_result"],"t":1.713},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":1.714}],"full":{"a":1751975995150204,"name":"_full_task","f":1751975995150204,"d_finished":0,"c":0,"l":1751975996864618,"d":1714414},"events":[{"name":"bootstrap","f":1751975995150565,"d_finished":49427,"c":1,"l":1751975995199992,"d":49427},{"a":1751975996864578,"name":"ack","f":1751975996864323,"d_finished":185,"c":1,"l":1751975996864508,"d":225},{"a":1751975996864576,"name":"processing","f":1751975995200536,"d_finished":1250739,"c":1766,"l":1751975996864509,"d":1250781},{"name":"ProduceResults","f":1751975995174234,"d_finished":729574,"c":1769,"l":1751975996864603,"d":729574},{"a":1751975996864604,"name":"Finish","f":1751975996864604,"d_finished":0,"c":0,"l":1751975996864618,"d":14},{"name":"task_result","f":1751975995200544,"d_finished":1247071,"c":1765,"l":1751975996863825,"d":1247071}],"id":"9437184::49"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-07-08T11:59:56.864713Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10323:12335];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:374;event=send_data;compute_actor_id=[1:10319:12331];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-07-08T11:59:56.864756Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:10323:12335];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:379;event=scan_finished;compute_actor_id=[1:10319:12331];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.024},{"events":["l_bootstrap"],"t":0.049},{"events":["f_processing","f_task_result"],"t":0.05},{"events":["l_task_result"],"t":1.713},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":1.714}],"full":{"a":1751975995150204,"name":"_full_task","f":1751975995150204,"d_finished":0,"c":0,"l":1751975996864719,"d":1714515},"events":[{"name":"bootstrap","f":1751975995150565,"d_finished":49427,"c":1,"l":1751975995199992,"d":49427},{"a":1751975996864578,"name":"ack","f":1751975996864323,"d_finished":185,"c":1,"l":1751975996864508,"d":326},{"a":1751975996864576,"name":"processing","f":1751975995200536,"d_finished":1250739,"c":1766,"l":1751975996864509,"d":1250882},{"name":"ProduceResults","f":1751975995174234,"d_finished":729574,"c":1769,"l":1751975996864603,"d":729574},{"a":1751975996864604,"name":"Finish","f":1751975996864604,"d_finished":0,"c":0,"l":1751975996864719,"d":115},{"name":"task_result","f":1751975995200544,"d_finished":1247071,"c":1765,"l":1751975996863825,"d":1247071}],"id":"9437184::49"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-07-08T11:59:56.864772Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10323:12335];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-07-08T11:59:55.149564Z;index_granules=0;index_portions=294;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=686784;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=686784;selected_rows=0; 2025-07-08T11:59:56.864779Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:10323:12335];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:192;event=scan_aborted;reason=unexpected on destructor; 2025-07-08T11:59:56.864843Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:10323:12335];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; >> TSchemeShardTest::AlterTableDropColumnSplitThenReCreate [GOOD] >> TSchemeShardTest::AlterTableKeyColumns >> Viewer::PDiskMerging [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate [GOOD] >> Viewer::SelectStringWithBase64Encoding >> Viewer::LevenshteinDistance [GOOD] >> Viewer::JsonStorageListingV2 >> LocalPartition::WithoutPartition [GOOD] >> LocalPartition::WithoutPartitionDeadNode >> TSchemeShardTest::IgnoreUserColumnIds [GOOD] >> TSchemeShardTest::DropTableAndConcurrentSplit >> TSchemeShardTest::DropIndexedTableAndForceDropSimultaneously >> KqpScanSpilling::HandleErrorsCorrectly [GOOD] >> TSchemeShardTest::CreateTable >> 
Viewer::JsonAutocompleteEmpty >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling [GOOD] >> TSchemeShardTest::AlterTableKeyColumns [GOOD] >> TSchemeShardTest::AlterTableFollowers >> TSchemeShardCheckProposeSize::CopyTable [GOOD] >> TSchemeShardCheckProposeSize::CopyTables >> Viewer::TabletMergingPacked >> TSchemeShardTest::DropIndexedTableAndForceDropSimultaneously [GOOD] >> TSchemeShardTest::DependentOps ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate [GOOD] Test command err: 2025-07-08T11:59:57.518897Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001224/r3tmp/tmpfSnJB5/pdisk_1.dat 2025-07-08T11:59:57.733883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T11:59:57.750523Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:57.789374Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:57.789404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:57.801355Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:59:57.886645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:59:57.906625Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:603:2519] 2025-07-08T11:59:57.906703Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:57.925622Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:57.925666Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T11:59:57.925841Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T11:59:57.925850Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T11:59:57.925857Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T11:59:57.925927Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T11:59:57.925944Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T11:59:57.925956Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:619:2519] in generation 1 2025-07-08T11:59:57.937173Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T11:59:57.941053Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T11:59:57.941121Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T11:59:57.941144Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:621:2529] 2025-07-08T11:59:57.941150Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T11:59:57.941155Z node 1 :TX_DATASHARD INFO: Cannot activate 
change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T11:59:57.941161Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:57.941330Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T11:59:57.941357Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T11:59:57.941376Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:57.941382Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:57.941392Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T11:59:57.941397Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:57.941513Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:599:2516], serverId# [1:610:2523], sessionId# [0:0:0] 2025-07-08T11:59:57.941556Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T11:59:57.941616Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T11:59:57.941637Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T11:59:57.941944Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T11:59:57.952258Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T11:59:57.952304Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T11:59:58.117657Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:640:2542], serverId# [1:642:2544], sessionId# [0:0:0] 2025-07-08T11:59:58.118604Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-07-08T11:59:58.118628Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:58.118806Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:58.118818Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-07-08T11:59:58.118831Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-07-08T11:59:58.118908Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-07-08T11:59:58.118953Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-07-08T11:59:58.119103Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:58.119121Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-07-08T11:59:58.119474Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready 
tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-07-08T11:59:58.119575Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:58.119815Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-07-08T11:59:58.119820Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:58.119956Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-07-08T11:59:58.119965Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:58.120199Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:58.120207Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T11:59:58.120212Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-07-08T11:59:58.120231Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:361:2356], exec latency: 0 ms, propose latency: 0 ms 2025-07-08T11:59:58.120239Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-07-08T11:59:58.120248Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:58.120856Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T11:59:58.121087Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-07-08T11:59:58.121096Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-07-08T11:59:58.121230Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-07-08T11:59:58.203584Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715658. Ctx: { TraceId: 01jzmygcnaad3y5gen0kt23rba, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzZjMGU4NzItNDExZWMzYzMtOWVhZjdhNTMtOWViMTk5NTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T11:59:58.204692Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:685:2577], serverId# [1:686:2578], sessionId# [0:0:0] 2025-07-08T11:59:58.204788Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T11:59:58.237257Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T11:59:58.237322Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:58.238537Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:693:2584], serverId# [1:694:2585], sessionId# [0:0:0] 2025-07-08T11:59:58.238874Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-07-08T11:59:58.261348Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-07-08T11:59:58.261381Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:58.261446Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-07-08T11:59:58.261455Z node 1 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-07-08T11:59:58.261541Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:58.261548Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:58.261557Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T11:59:58.261569Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:58.261588Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:693:2584], serverId# [1:694:2585], sessionId# [0:0:0] 2025-07-08T11:59:58.261830Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T11:59:58.261923Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T11:59:58.261970Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:58.261973Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-07-08T11:59:58.261979Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for WaitForStreamClearance 2025-07-08T11:59:58.262019Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-07-08T11:59:58.262025Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:58.262146Z node 1 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 2025 ... 
2025-07-08T12:00:01.143358Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:01.143400Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:01.143558Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T12:00:01.143567Z node 3 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T12:00:01.143574Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T12:00:01.143620Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:01.143640Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:01.143650Z node 3 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [3:619:2519] in generation 1 2025-07-08T12:00:01.155679Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:01.155710Z node 3 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T12:00:01.155741Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:01.155755Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [3:621:2529] 2025-07-08T12:00:01.155761Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:01.155766Z node 3 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T12:00:01.155771Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:01.155884Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T12:00:01.155906Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T12:00:01.155923Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:01.155929Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:01.155936Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:01.155942Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:01.156033Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:599:2516], serverId# [3:610:2523], sessionId# [0:0:0] 2025-07-08T12:00:01.156084Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:01.156140Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T12:00:01.156158Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T12:00:01.156467Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:01.169241Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:01.169285Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T12:00:01.341731Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:640:2542], 
serverId# [3:642:2544], sessionId# [0:0:0] 2025-07-08T12:00:01.341840Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-07-08T12:00:01.341850Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:01.342102Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:01.342115Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-07-08T12:00:01.342125Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-07-08T12:00:01.342191Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-07-08T12:00:01.342227Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-07-08T12:00:01.342313Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:01.342327Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-07-08T12:00:01.342416Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-07-08T12:00:01.342498Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:01.342806Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-07-08T12:00:01.342814Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:01.342941Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-07-08T12:00:01.342951Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:01.343099Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:01.343108Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:01.343114Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-07-08T12:00:01.343129Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:361:2356], exec latency: 0 ms, propose latency: 0 ms 2025-07-08T12:00:01.343138Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-07-08T12:00:01.343149Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:01.343377Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:01.343717Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-07-08T12:00:01.343756Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 
281474976715657 datashard 72075186224037888 state Ready 2025-07-08T12:00:01.343763Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-07-08T12:00:01.384700Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715658. Ctx: { TraceId: 01jzmygft03tnebga4xbeeqphf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OTEzN2ZjMDAtM2M0NDZlMWEtM2Q2MjVjNjMtYTUwNjA3NmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:01.384867Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:685:2577], serverId# [3:686:2578], sessionId# [0:0:0] 2025-07-08T12:00:01.384932Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:01.419737Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:01.419788Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:01.420979Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:693:2584], serverId# [3:694:2585], sessionId# [0:0:0] 2025-07-08T12:00:01.421361Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-07-08T12:00:01.449200Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-07-08T12:00:01.449233Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:01.449300Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-07-08T12:00:01.449309Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-07-08T12:00:01.449405Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:01.449413Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:01.449422Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:01.449435Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:01.449451Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:693:2584], serverId# [3:694:2585], sessionId# [0:0:0] 2025-07-08T12:00:01.449670Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:01.449752Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:01.449789Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:01.449795Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:01.449801Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for WaitForStreamClearance 2025-07-08T12:00:01.449851Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:01.449858Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:01.449974Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 2025-07-08T12:00:01.450071Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 
72075186224037888, TxId: 281474976715659, Size: 43, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-07-08T12:00:01.450103Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715659, PendingAcks: 0 2025-07-08T12:00:01.450110Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 0 2025-07-08T12:00:01.450200Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-07-08T12:00:01.450206Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715659, at: 72075186224037888 2025-07-08T12:00:01.450229Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:01.450234Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:01.450240Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for ReadTableScan 2025-07-08T12:00:01.450269Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:01.450278Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:01.450285Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> TSchemeShardTest::ParallelCreateSameTable [GOOD] >> TSchemeShardTest::MultipleColumnFamilies >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDate32 [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::HandleErrorsCorrectly [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/43nv/001cc9/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk0 Trying to start YDB, gRPC: 61358, MsgBus: 21538 2025-07-08T11:59:41.699028Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679544258939337:2245];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:59:41.700141Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001cc9/r3tmp/tmpAfJ2gj/pdisk_1.dat 2025-07-08T11:59:41.843932Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61358, node 1 2025-07-08T11:59:41.873132Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:59:41.873143Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:59:41.873144Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:59:41.873182Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21538 2025-07-08T11:59:41.917383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:41.917418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:41.921798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21538 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:59:42.041812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:59:42.049501Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:59:42.062242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T11:59:42.107828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T11:59:42.153877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:59:42.185673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T11:59:42.604927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:59:42.641340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:59:42.666420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T11:59:42.687039Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:59:42.688938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T11:59:42.703230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T11:59:42.763063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T11:59:42.786464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T11:59:46.693089Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7524679544258939337:2245];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:59:46.693921Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-07-08T11:59:56.841217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-07-08T11:59:56.841234Z node 1 :IMPORT WARN: Table profiles were not loaded ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '785) '('"_id" '"bff599a4-360415a0-b1e6e3aa-b2b80ec") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7 '1 '"HashV1")) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '683) '('"_id" '"612dccbd-9a24f0ee-fd39435f-242a4e3c") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 
'('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '695) '('"_id" '"a4cccd22-3a9acf89-a7a0042b-e9969e2d")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) 2025-07-08T12:00:01.421852Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7524679630158292795:6033], blobId: 0, bytes: 1401088 2025-07-08T12:00:01.421884Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7524679630158292795:6033], blobId: 1, bytes: 84 2025-07-08T12:00:01.421889Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7524679630158292795:6033], blobId: 2, bytes: 2402376 2025-07-08T12:00:01.421895Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7524679630158292795:6033], blobId: 3, bytes: 144 2025-07-08T12:00:01.421898Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7524679630158292795:6033], blobId: 4, bytes: 600472 2025-07-08T12:00:01.421902Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7524679630158292795:6033], blobId: 5, bytes: 36 2025-07-08T12:00:01.421905Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7524679630158292795:6033], blobId: 6, bytes: 1200936 2025-07-08T12:00:01.421910Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7524679630158292795:6033], blobId: 7, bytes: 72 2025-07-08T12:00:01.421913Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7524679630158292795:6033], blobId: 8, bytes: 1200744 2025-07-08T12:00:01.421917Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7524679630158292795:6033], blobId: 9, bytes: 72 2025-07-08T12:00:01.421924Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7524679630158292795:6033], blobId: 10, bytes: 1601312 2025-07-08T12:00:01.421929Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7524679630158292795:6033], blobId: 11, bytes: 96 2025-07-08T12:00:01.421932Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7524679630158292795:6033], blobId: 12, bytes: 2001584 2025-07-08T12:00:01.421938Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7524679630158292795:6033], blobId: 13, bytes: 120 2025-07-08T12:00:01.421942Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7524679630158292795:6033], blobId: 14, bytes: 1801952 2025-07-08T12:00:01.421975Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7524679630158292795:6033], blobId: 15, bytes: 108 2025-07-08T12:00:01.421979Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7524679630158292795:6033], blobId: 16, bytes: 1000792 2025-07-08T12:00:01.421983Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7524679630158292795:6033], blobId: 17, bytes: 60 2025-07-08T12:00:01.421989Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. 
From: [1:7524679630158292795:6033], blobId: 18, bytes: 2001792 2025-07-08T12:00:01.421994Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7524679630158292795:6033], blobId: 19, bytes: 120 2025-07-08T12:00:01.421999Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7524679630158292795:6033], blobId: 20, bytes: 2202288 2025-07-08T12:00:01.422004Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7524679630158292795:6033], blobId: 21, bytes: 132 2025-07-08T12:00:01.422008Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7524679630158292795:6033], blobId: 22, bytes: 2002000 2025-07-08T12:00:01.422013Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7524679630158292795:6033], blobId: 23, bytes: 120 2025-07-08T12:00:01.422141Z node 1 :KQP_COMPUTE ERROR: TxId: 281474976715970. Error: [TEvError] File size limit exceeded: 1/0Mb 2025-07-08T12:00:01.423303Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7524679630158292786:4323], TxId: 281474976715970, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=MmJmOWRlODEtODNiZjFhZjUtNzVkZDc2MmYtNGUzZjE1MmI=. TraceId : 01jzmygfmy7ad2dfq5h7dqn4vc. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: [Compute spilling][TEvError] File size limit exceeded: 1/0Mb }. 2025-07-08T12:00:01.429178Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7524679630158292787:4324], TxId: 281474976715970, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=MmJmOWRlODEtODNiZjFhZjUtNzVkZDc2MmYtNGUzZjE1MmI=. TraceId : 01jzmygfmy7ad2dfq5h7dqn4vc. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : . }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2025-07-08T12:00:01.430208Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmJmOWRlODEtODNiZjFhZjUtNzVkZDc2MmYtNGUzZjE1MmI=, ActorId: [1:7524679630158292772:4318], ActorState: ExecuteState, TraceId: 01jzmygfmy7ad2dfq5h7dqn4vc, Create QueryResponse for error on request, msg: >> TSchemeShardTest::DropTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::DropTable >> TSchemeShardTest::CreateTable [GOOD] >> TSchemeShardTest::CreateTableWithDate >> DataShardVolatile::DistributedWriteThenCopyTable [GOOD] >> DataShardVolatile::DistributedWriteThenBulkUpsert ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/43nv/001cd9/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk4 Trying to start YDB, gRPC: 22305, MsgBus: 27946 2025-07-08T11:59:40.557075Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679539249600426:2232];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:59:40.652508Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001cd9/r3tmp/tmpLjA7B0/pdisk_1.dat 2025-07-08T11:59:40.748090Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:40.770614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:40.770639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 22305, node 1 2025-07-08T11:59:40.773401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:59:40.785157Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:59:40.785173Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:59:40.785175Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:59:40.785222Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27946 TClient is connected to server localhost:27946 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-07-08T11:59:40.959755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:59:40.969391Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:59:40.981456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:59:41.025649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:59:41.123946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:59:41.146151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:59:41.549099Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:59:41.939323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:59:41.973678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:59:41.989444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T11:59:42.009090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T11:59:42.029801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T11:59:42.049881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T11:59:42.067544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T11:59:45.561014Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7524679539249600426:2232];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:59:45.561175Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-07-08T11:59:55.745061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console 
configs 2025-07-08T11:59:55.745079Z node 1 :IMPORT WARN: Table profiles were not loaded ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '785) '('"_id" '"bbd7773f-73778c6d-13653ebb-8a97f1b") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7 '1 '"HashV1")) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '683) '('"_id" '"50699478-f09a63e0-73e68a28-e6f9b91d") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '695) '('"_id" '"e9d911d0-e7b682e-801f9c6a-a00c7b01")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) >> Viewer::JsonAutocompleteColumns [GOOD] >> TSchemeShardTest::DependentOps [GOOD] >> TSchemeShardTest::DefaultColumnFamiliesWithNonCanonicName >> Viewer::JsonAutocompleteStartOfDatabaseName [GOOD] >> Viewer::JsonStorageListingV1 >> Viewer::JsonAutocompleteSimilarDatabaseName [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNameWithLimit >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSchemeTx [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard >> TSchemeShardTest::CreateIndexedTable >> TSchemeShardTest::Boot >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows [GOOD] >> Viewer::TabletMergingPacked [GOOD] >> Viewer::VDiskMerging >> Viewer::JsonAutocompleteEmpty [GOOD] >> Viewer::JsonAutocompleteEndOfDatabaseName ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::JsonAutocompleteColumns [GOOD] Test command err: 2025-07-08T12:00:02.312116Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 2368, node 1 TClient is connected to server localhost:3570 json result: {"Success":true,"Result":{"Total":6,"Entities":[{"Name":"name","Type":"column","Parent":"orders"},{"Name":"name","Type":"column","Parent":"products"},{"Name":"id","Type":"column","Parent":"orders"},{"Name":"id","Type":"column","Parent":"products"},{"Name":"description","Type":"column","Parent":"orders"},{"Name":"description","Type":"column","Parent":"products"}]},"Version":2} >> TSchemeShardTest::AlterTableFollowers 
[GOOD] >> TSchemeShardTest::AlterTableSizeToSplit >> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling [GOOD] >> TSchemeShardTest::CreateTableWithDate [GOOD] >> TSchemeShardTest::CreateIndexedTableRejects |66.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |66.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |66.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table >> TSchemeShardTest::Boot [GOOD] >> TSchemeShardTest::CacheEffectiveACL [GOOD] >> TSchemeShardTest::ConsistentCopyTable >> TSchemeShardTest::DropTable [GOOD] >> TSchemeShardTest::DropTableById >> Viewer::VDiskMerging [GOOD] >> Viewer::TenantInfo5kkTablets >> TSchemeShardTest::DefaultColumnFamiliesWithNonCanonicName [GOOD] >> TSchemeShardTest::DropPQ >> TSchemeShardTest::AlterTableSizeToSplit [GOOD] >> TSchemeShardTest::AlterTableSplitSchema >> TSchemeShardTest::CreateIndexedTable [GOOD] >> TSchemeShardTest::CreateAlterTableWithCodec >> TSchemeShardTest::DropTableById [GOOD] >> TSchemeShardTest::DropPQFail ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows [GOOD] Test command err: 2025-07-08T11:59:56.783901Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001228/r3tmp/tmpXx2I0t/pdisk_1.dat 2025-07-08T11:59:56.913965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T11:59:56.934014Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:56.969439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:56.969473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:56.985384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:59:57.078394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:59:57.106454Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:625:2535] 2025-07-08T11:59:57.106521Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:57.123416Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:57.123459Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T11:59:57.123605Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T11:59:57.123614Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T11:59:57.123621Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T11:59:57.123702Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T11:59:57.123753Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T11:59:57.123764Z node 1 
:TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:652:2535] in generation 1 2025-07-08T11:59:57.124184Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:627:2537] 2025-07-08T11:59:57.124225Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:57.129948Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:57.130036Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:631:2539] 2025-07-08T11:59:57.130064Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:57.131018Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T11:59:57.131143Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-07-08T11:59:57.131150Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-07-08T11:59:57.131156Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-07-08T11:59:57.131190Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T11:59:57.131245Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T11:59:57.131253Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:675:2537] in generation 1 2025-07-08T11:59:57.131313Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:57.131324Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T11:59:57.131418Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-07-08T11:59:57.131424Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-07-08T11:59:57.131429Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-07-08T11:59:57.131448Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T11:59:57.131462Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T11:59:57.131468Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:676:2539] in generation 1 2025-07-08T11:59:57.145207Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T11:59:57.149454Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T11:59:57.149503Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T11:59:57.149525Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:680:2566] 2025-07-08T11:59:57.149532Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T11:59:57.149537Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T11:59:57.149542Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:57.149569Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T11:59:57.149575Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-07-08T11:59:57.149606Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T11:59:57.149615Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: 
[1:681:2567] 2025-07-08T11:59:57.149618Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-07-08T11:59:57.149622Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-07-08T11:59:57.149625Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-07-08T11:59:57.149700Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T11:59:57.149706Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-07-08T11:59:57.149716Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T11:59:57.149723Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:682:2568] 2025-07-08T11:59:57.149727Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-07-08T11:59:57.149730Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-07-08T11:59:57.149734Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-07-08T11:59:57.149826Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T11:59:57.149845Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T11:59:57.149851Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-07-08T11:59:57.149859Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-07-08T11:59:57.149875Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:57.149881Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:57.149889Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T11:59:57.149894Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:57.149900Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-07-08T11:59:57.149903Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:57.149907Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-07-08T11:59:57.149911Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-07-08T11:59:57.149917Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-07-08T11:59:57.149925Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-07-08T11:59:57.149942Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:614:2530], serverId# [1:640:2543], sessionId# [0:0:0] 2025-07-08T11:59:57.149949Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:615:2531], serverId# [1:648:2549], sessionId# [0:0:0] 2025-07-08T11:59:57.149953Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-07-08T11:59:57.149957Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:57.149961Z node 1 
:TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-07-08T11:59:57.149966Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-07-08T11:59:57.150007Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T11:59:57.150056Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T11:59:57.150075Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T11:59:57.150169Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-07-08T11:59:57.150196Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-07-08T11:59:57.150206Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-07-08T11:59:57.150636Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T11:59:57.150651Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-07-08T11:59:57.160965Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T11:59:57.161004Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T11:59:57.161163Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-07-08T11:59:57.161174Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-07-08T11:59:57.209183Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:616:2532], serverId# [1:699:2578], sessionId# [0:0:0] 2025-07-08T11:59:57.209249Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-07-08T11:59:57.209297Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 281474976715657 ssId 72057594046644480 seqNo 2:3 2025-07-08T11:59:57.209323Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037890 2025-07-08T11:59:57.209424Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-07-08T11:59:57.221339Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2025-07-08T11:59:57.221379Z ... 
eration [1500:281474976715661] at 72075186224037888 for LoadAndWaitInRS 2025-07-08T12:00:03.405659Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:03.405766Z node 3 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037889 dest 72075186224037890 producer 72075186224037889 txId 281474976715661 2025-07-08T12:00:03.405778Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 1500 txid# 281474976715661 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037889 ReadSet.Size()# 19 Seqno# 6 Flags# 0} 2025-07-08T12:00:03.405791Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-07-08T12:00:03.405845Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-07-08T12:00:03.405851Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-07-08T12:00:03.405856Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [1500:281474976715661] at 72075186224037890 for LoadAndWaitInRS 2025-07-08T12:00:03.405923Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:03.417352Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-07-08T12:00:03.417398Z node 3 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715661] from 72075186224037890 at tablet 72075186224037890 send result to client [3:930:2731], exec latency: 0 ms, propose latency: 1 ms 2025-07-08T12:00:03.417416Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037890 {TEvReadSet step# 1500 txid# 281474976715661 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletConsumer# 72075186224037890 Flags# 0 Seqno# 6} 2025-07-08T12:00:03.417425Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-07-08T12:00:03.417471Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:03.417482Z node 3 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715661] from 72075186224037888 at tablet 72075186224037888 send result to client [3:930:2731], exec latency: 0 ms, propose latency: 1 ms 2025-07-08T12:00:03.417490Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037888 {TEvReadSet step# 1500 txid# 281474976715661 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 5} 2025-07-08T12:00:03.417495Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:03.417515Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715661 2025-07-08T12:00:03.417532Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:930:2731] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715661, shard# 72075186224037890, status# 2 2025-07-08T12:00:03.417546Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715661 2025-07-08T12:00:03.417553Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:930:2731] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715661, shard# 72075186224037888, status# 2 
2025-07-08T12:00:03.417560Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:930:2731] Reply: txId# 281474976715661, status# OK, error# 2025-07-08T12:00:03.417631Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2025-07-08T12:00:03.417639Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037889 2025-07-08T12:00:03.417697Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-07-08T12:00:03.417706Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:03.417714Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-07-08T12:00:03.417729Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-07-08T12:00:03.417772Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [3:924:2726], serverId# [3:925:2727], sessionId# [0:0:0] 2025-07-08T12:00:03.418065Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-07-08T12:00:03.418159Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-07-08T12:00:03.418212Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-07-08T12:00:03.418221Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:03.418228Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715664] at 72075186224037889 for WaitForStreamClearance 2025-07-08T12:00:03.418275Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:03.418286Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-07-08T12:00:03.418420Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037889, TxId: 281474976715664, MessageQuota: 1 2025-07-08T12:00:03.418504Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037889, TxId: 281474976715664, Size: 70, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-07-08T12:00:03.418528Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037889, TxId: 281474976715664, PendingAcks: 0 2025-07-08T12:00:03.418535Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037889, TxId: 281474976715664, MessageQuota: 0 2025-07-08T12:00:03.418612Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2025-07-08T12:00:03.418619Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715664, at: 72075186224037889 2025-07-08T12:00:03.418635Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-07-08T12:00:03.418640Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:03.418647Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715664] at 72075186224037889 for ReadTableScan 2025-07-08T12:00:03.418674Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:03.418683Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-07-08T12:00:03.418691Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-07-08T12:00:03.418961Z node 3 :TX_DATASHARD DEBUG: 
TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:03.419016Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:03.419047Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:03.419053Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:03.419059Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715665] at 72075186224037888 for WaitForStreamClearance 2025-07-08T12:00:03.419087Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:03.419095Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:03.419196Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 1 2025-07-08T12:00:03.419234Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715665, Size: 35, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-07-08T12:00:03.419252Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715665, PendingAcks: 0 2025-07-08T12:00:03.419258Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 0 2025-07-08T12:00:03.419301Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-07-08T12:00:03.419306Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715665, at: 72075186224037888 2025-07-08T12:00:03.419326Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:03.419330Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:03.419336Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715665] at 72075186224037888 for ReadTableScan 2025-07-08T12:00:03.419354Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:03.419362Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:03.419369Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:03.419588Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-07-08T12:00:03.419636Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2025-07-08T12:00:03.419669Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-07-08T12:00:03.419675Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:03.419680Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715666] at 72075186224037890 for WaitForStreamClearance 2025-07-08T12:00:03.419709Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:03.419716Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-07-08T12:00:03.419809Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715666, MessageQuota: 1 2025-07-08T12:00:03.419841Z node 3 :TX_DATASHARD DEBUG: Send response data 
ShardId: 72075186224037890, TxId: 281474976715666, Size: 35, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-07-08T12:00:03.419857Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715666, PendingAcks: 0 2025-07-08T12:00:03.419862Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715666, MessageQuota: 0 2025-07-08T12:00:03.419885Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-07-08T12:00:03.419890Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715666, at: 72075186224037890 2025-07-08T12:00:03.419928Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-07-08T12:00:03.419933Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:03.419939Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715666] at 72075186224037890 for ReadTableScan 2025-07-08T12:00:03.419958Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:03.419966Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-07-08T12:00:03.419972Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 [GOOD] >> TSchemeShardTest::RmDirTwice >> Viewer::JsonAutocompleteSimilarDatabaseNameWithLimit [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNamePOST >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-false >> TSchemeShardTest::MultipleColumnFamilies [GOOD] >> TSchemeShardTest::MultipleColumnFamiliesWithStorage ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/43nv/001cc7/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk3 Trying to start YDB, gRPC: 8570, MsgBus: 18038 2025-07-08T11:59:42.530925Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679547962288275:2086];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001cc7/r3tmp/tmpT5ZzYL/pdisk_1.dat 2025-07-08T11:59:42.598312Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T11:59:42.669436Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8570, node 1 2025-07-08T11:59:42.723859Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:42.723906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:42.725330Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:59:42.729121Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:59:42.729124Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:59:42.729126Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:59:42.729179Z node 1 :NET_CLASSIFIER 
ERROR: got bad distributable configuration TClient is connected to server localhost:18038 TClient is connected to server localhost:18038 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:59:42.886418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:59:42.893214Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:59:42.901657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:59:42.954012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T11:59:43.005663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T11:59:43.026225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T11:59:43.186248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:59:43.199945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:59:43.221547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T11:59:43.237141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T11:59:43.253933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T11:59:43.270233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T11:59:43.288301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T11:59:43.529182Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:59:47.533058Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7524679547962288275:2086];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:59:47.533952Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-07-08T11:59:57.656998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-07-08T11:59:57.657014Z node 1 :IMPORT WARN: Table profiles were not loaded ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '785) '('"_id" '"e9a72d4e-1ddf7b58-ed8df883-97404b5c") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7 '1 '"HashV1")) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '683) '('"_id" '"eec74796-512f5bf2-f0b9dae5-32f166be") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap 
(ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '695) '('"_id" '"739517f7-6d9ad181-69f09f30-39204484")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) >> EraseRowsTests::ConditionalEraseRowsShouldNotEraseModifiedRows >> DataShardVolatile::DistributedWriteThenBulkUpsert [GOOD] >> DataShardVolatile::DistributedWriteThenBulkUpsertWithCdc >> TSchemeShardTest::CreateAlterTableWithCodec [GOOD] >> TSchemeShardTest::CopyTableTwiceSimultaneously >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard [GOOD] >> TSchemeShardTest::AlterTableSplitSchema [GOOD] >> TSchemeShardTest::AlterTableSettings >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-false [GOOD] >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-true >> TSchemeShardTest::RmDirTwice [GOOD] >> TSchemeShardTest::TopicMeteringMode >> Viewer::Cluster10000Tablets >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-true [GOOD] >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true >> Viewer::JsonAutocompleteEndOfDatabaseName [GOOD] >> Viewer::JsonAutocompleteScheme >> TSchemeShardTest::MultipleColumnFamiliesWithStorage [GOOD] >> TSchemeShardTest::ParallelModifying >> TSchemeShardTest::CreateIndexedTableRejects [GOOD] >> TSchemeShardTest::CreateIndexedTableAndForceDrop >> Viewer::SelectStringWithNoBase64Encoding [GOOD] >> Viewer::ServerlessNodesPage ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 [GOOD] Test command err: 2025-07-08T12:00:00.156386Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0011e5/r3tmp/tmpIdGuqS/pdisk_1.dat 2025-07-08T12:00:00.279836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:00:00.296308Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:00.335385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:00.335420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:00.349411Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:00.423875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:00.444793Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:603:2519] 2025-07-08T12:00:00.444887Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:00.455917Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:00.455957Z node 1 :TX_DATASHARD DEBUG: 
TDataShard::TTxInit::Execute 2025-07-08T12:00:00.456132Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T12:00:00.456143Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T12:00:00.456150Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T12:00:00.456223Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:00.456244Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:00.456258Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:619:2519] in generation 1 2025-07-08T12:00:00.466638Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:00.470889Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T12:00:00.470970Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:00.470996Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:621:2529] 2025-07-08T12:00:00.471002Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:00.471007Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T12:00:00.471013Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:00.471192Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T12:00:00.471220Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T12:00:00.471240Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:00.471246Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:00.471255Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:00.471260Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:00.471372Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:599:2516], serverId# [1:610:2523], sessionId# [0:0:0] 2025-07-08T12:00:00.471414Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:00.471476Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T12:00:00.471494Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T12:00:00.471798Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:00.482189Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:00.482236Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T12:00:00.630727Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:640:2542], serverId# [1:642:2544], sessionId# [0:0:0] 2025-07-08T12:00:00.631629Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 
281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-07-08T12:00:00.631650Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:00.631787Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:00.631800Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-07-08T12:00:00.631811Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-07-08T12:00:00.631877Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-07-08T12:00:00.631912Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-07-08T12:00:00.632008Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:00.632024Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-07-08T12:00:00.632426Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-07-08T12:00:00.632534Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:00.632778Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-07-08T12:00:00.632788Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:00.632939Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-07-08T12:00:00.632967Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:00.633156Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:00.633167Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:00.633173Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-07-08T12:00:00.633189Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:361:2356], exec latency: 0 ms, propose latency: 0 ms 2025-07-08T12:00:00.633199Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-07-08T12:00:00.633209Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:00.633973Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:00.634193Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-07-08T12:00:00.634204Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-07-08T12:00:00.634321Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 
coordinator 72057594046316545 last step 0 next step 1000 2025-07-08T12:00:00.733938Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715658. Ctx: { TraceId: 01jzmygf3vbsdphjgqxezyejwx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGZjYjg4NWUtNjBmMWZiYmQtYTRmMjYwZDMtOGE3NDg3YTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:00.735222Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:685:2577], serverId# [1:686:2578], sessionId# [0:0:0] 2025-07-08T12:00:00.735314Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:00.761223Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:00.761285Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:00.762423Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:693:2584], serverId# [1:694:2585], sessionId# [0:0:0] 2025-07-08T12:00:00.762700Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-07-08T12:00:00.785174Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-07-08T12:00:00.785212Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:00.785279Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-07-08T12:00:00.785289Z node 1 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-07-08T12:00:00.785383Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:00.785393Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:00.785404Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:00.785419Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:00.785441Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:693:2584], serverId# [1:694:2585], sessionId# [0:0:0] 2025-07-08T12:00:00.785691Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:00.785795Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:00.785848Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:00.785853Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:00.785861Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for WaitForStreamClearance 2025-07-08T12:00:00.785904Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:00.785911Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:00.786051Z node 1 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 2025 ... 
2025-07-08T12:00:03.720019Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:03.720067Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:03.720261Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T12:00:03.720273Z node 3 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T12:00:03.720281Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T12:00:03.720340Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:03.720373Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:03.720387Z node 3 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [3:619:2519] in generation 1 2025-07-08T12:00:03.737232Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:03.737267Z node 3 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T12:00:03.737298Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:03.737314Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [3:621:2529] 2025-07-08T12:00:03.737319Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:03.737323Z node 3 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T12:00:03.737329Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:03.737461Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T12:00:03.737490Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T12:00:03.737510Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:03.737517Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:03.737526Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:03.737532Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:03.737643Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:599:2516], serverId# [3:610:2523], sessionId# [0:0:0] 2025-07-08T12:00:03.737685Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:03.737750Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T12:00:03.737770Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T12:00:03.738097Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:03.748521Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:03.748576Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T12:00:03.899732Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:640:2542], 
serverId# [3:642:2544], sessionId# [0:0:0] 2025-07-08T12:00:03.899856Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-07-08T12:00:03.899866Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:03.900169Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:03.900183Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-07-08T12:00:03.900195Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-07-08T12:00:03.900259Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-07-08T12:00:03.900297Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-07-08T12:00:03.900391Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:03.900407Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-07-08T12:00:03.900494Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-07-08T12:00:03.900582Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:03.900916Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-07-08T12:00:03.900926Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:03.901071Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-07-08T12:00:03.901082Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:03.901233Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:03.901242Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:03.901248Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-07-08T12:00:03.901266Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:361:2356], exec latency: 0 ms, propose latency: 0 ms 2025-07-08T12:00:03.901276Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-07-08T12:00:03.901286Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:03.901520Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:03.901856Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-07-08T12:00:03.901899Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 
281474976715657 datashard 72075186224037888 state Ready 2025-07-08T12:00:03.901906Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-07-08T12:00:03.934720Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715658. Ctx: { TraceId: 01jzmygj9zf944xxb819sm3tb5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MWYzMTZkMS05YWU4NGY5NS1mMTIyYmIxZi1jN2M2ZDcwOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:03.934907Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:685:2577], serverId# [3:686:2578], sessionId# [0:0:0] 2025-07-08T12:00:03.934977Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:03.961735Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:03.961791Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:03.962859Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:693:2584], serverId# [3:694:2585], sessionId# [0:0:0] 2025-07-08T12:00:03.963132Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-07-08T12:00:03.989164Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-07-08T12:00:03.989199Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:03.989271Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-07-08T12:00:03.989280Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-07-08T12:00:03.989376Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:03.989385Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:03.989396Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:03.989412Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:03.989432Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:693:2584], serverId# [3:694:2585], sessionId# [0:0:0] 2025-07-08T12:00:03.989672Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:03.989771Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:03.989807Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:03.989812Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:03.989819Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for WaitForStreamClearance 2025-07-08T12:00:03.989864Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:03.989871Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:03.989979Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 2025-07-08T12:00:03.990043Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 
72075186224037888, TxId: 281474976715659, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-07-08T12:00:03.990066Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715659, PendingAcks: 0 2025-07-08T12:00:03.990072Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 0 2025-07-08T12:00:03.990149Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-07-08T12:00:03.990153Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715659, at: 72075186224037888 2025-07-08T12:00:03.990168Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:03.990171Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:03.990176Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for ReadTableScan 2025-07-08T12:00:03.990205Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:03.990211Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:03.990217Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> TSchemeShardTest::CopyTableTwiceSimultaneously [GOOD] >> TSchemeShardTest::CopyTableWithAlterConfig >> TSchemeShardTest::ConsistentCopyTable [GOOD] >> TSchemeShardTest::ConsistentCopyTableAwait ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard [GOOD] Test command err: 2025-07-08T12:00:00.053622Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0011e7/r3tmp/tmpgmD3rf/pdisk_1.dat 2025-07-08T12:00:00.204198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:00:00.220673Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:00.261047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:00.261085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:00.273356Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:00.358996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:00.420276Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:625:2535] 2025-07-08T12:00:00.420365Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:00.429176Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:00.429220Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:00.429380Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T12:00:00.429390Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 
72075186224037888 2025-07-08T12:00:00.429397Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T12:00:00.429464Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:00.429535Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:00.429546Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:652:2535] in generation 1 2025-07-08T12:00:00.429915Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:627:2537] 2025-07-08T12:00:00.429954Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:00.431418Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:00.431510Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:631:2539] 2025-07-08T12:00:00.431543Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:00.432539Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:00.432686Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-07-08T12:00:00.432695Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-07-08T12:00:00.432702Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-07-08T12:00:00.432745Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:00.432811Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:00.432822Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:675:2537] in generation 1 2025-07-08T12:00:00.432890Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:00.432904Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:00.433016Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-07-08T12:00:00.433024Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-07-08T12:00:00.433029Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-07-08T12:00:00.433054Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:00.433071Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:00.433079Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:676:2539] in generation 1 2025-07-08T12:00:00.445383Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:00.449315Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T12:00:00.449372Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:00.449397Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:680:2566] 2025-07-08T12:00:00.449402Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:00.449408Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T12:00:00.449414Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:00.449443Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:00.449450Z node 1 :TX_DATASHARD INFO: Switched 
to work state WaitScheme tabletId 72075186224037889 2025-07-08T12:00:00.449460Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:00.449468Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:681:2567] 2025-07-08T12:00:00.449471Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-07-08T12:00:00.449474Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-07-08T12:00:00.449477Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-07-08T12:00:00.449556Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:00.449561Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-07-08T12:00:00.449569Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:00.449576Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:682:2568] 2025-07-08T12:00:00.449579Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-07-08T12:00:00.449583Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-07-08T12:00:00.449586Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-07-08T12:00:00.449685Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T12:00:00.449709Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T12:00:00.449715Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-07-08T12:00:00.449722Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-07-08T12:00:00.449737Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:00.449744Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:00.449753Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:00.449757Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:00.449763Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-07-08T12:00:00.449766Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:00.449769Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-07-08T12:00:00.449773Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-07-08T12:00:00.449778Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-07-08T12:00:00.449785Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-07-08T12:00:00.449801Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:614:2530], serverId# [1:640:2543], sessionId# [0:0:0] 2025-07-08T12:00:00.449808Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# 
[1:615:2531], serverId# [1:648:2549], sessionId# [0:0:0] 2025-07-08T12:00:00.449812Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-07-08T12:00:00.449815Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:00.449818Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-07-08T12:00:00.449822Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-07-08T12:00:00.449865Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:00.449924Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T12:00:00.449949Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T12:00:00.450043Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-07-08T12:00:00.450068Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-07-08T12:00:00.450077Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-07-08T12:00:00.450517Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:00.450533Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-07-08T12:00:00.461281Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:00.461321Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T12:00:00.461474Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-07-08T12:00:00.461483Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-07-08T12:00:00.507393Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:616:2532], serverId# [1:699:2578], sessionId# [0:0:0] 2025-07-08T12:00:00.507455Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-07-08T12:00:00.507509Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 281474976715657 ssId 72057594046644480 seqNo 2:3 2025-07-08T12:00:00.507531Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037890 2025-07-08T12:00:00.507637Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-07-08T12:00:00.518020Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2025-07-08T12:00:00.518066Z ... 
6644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-07-08T12:00:04.583521Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:930:2731] Handle TEvDataShard::TEvEraseRowsRequest 2025-07-08T12:00:04.583544Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:930:2731] Propose tx: txId# 281474976715661, shard# 72075186224037890, keys# 3, dependents# 0, dependencies# 1 2025-07-08T12:00:04.583559Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:930:2731] Propose tx: txId# 281474976715661, shard# 72075186224037888, keys# 3, dependents# 0, dependencies# 1 2025-07-08T12:00:04.583568Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:930:2731] Propose tx: txId# 281474976715661, shard# 72075186224037889, keys# 3, dependents# 2, dependencies# 0 2025-07-08T12:00:04.583616Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-07-08T12:00:04.583657Z node 3 :TX_DATASHARD DEBUG: Prepared DistributedErase transaction txId 281474976715661 at tablet 72075186224037890 2025-07-08T12:00:04.583736Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:04.583748Z node 3 :TX_DATASHARD DEBUG: Prepared DistributedErase transaction txId 281474976715661 at tablet 72075186224037888 2025-07-08T12:00:04.583781Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-07-08T12:00:04.583794Z node 3 :TX_DATASHARD DEBUG: Prepared DistributedErase transaction txId 281474976715661 at tablet 72075186224037889 2025-07-08T12:00:04.594598Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-07-08T12:00:04.594632Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:04.594675Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715660 2025-07-08T12:00:04.594688Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037890 source 72075186224037890 dest 72075186224037888 consumer 72075186224037888 txId 281474976715660 2025-07-08T12:00:04.594705Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:930:2731] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715661, shard# 72075186224037888, status# 1 2025-07-08T12:00:04.594729Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-07-08T12:00:04.594746Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:930:2731] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715661, shard# 72075186224037889, status# 1 2025-07-08T12:00:04.594760Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-07-08T12:00:04.594766Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2025-07-08T12:00:04.594780Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715660 2025-07-08T12:00:04.594787Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715660 2025-07-08T12:00:04.594793Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:930:2731] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 
281474976715661, shard# 72075186224037890, status# 1 2025-07-08T12:00:04.594799Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:930:2731] Register plan: txId# 281474976715661, minStep# 1002, maxStep# 31002 2025-07-08T12:00:04.605108Z node 3 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:00:04.606156Z node 3 :TX_DATASHARD INFO: OnDetach: 72075186224037888 2025-07-08T12:00:04.606201Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-07-08T12:00:04.606757Z node 3 :TX_DATASHARD DEBUG: Client pipe to tablet 72075186224037888 from 72075186224037889 is reset 2025-07-08T12:00:04.606775Z node 3 :TX_DATASHARD DEBUG: Client pipe to tablet 72075186224037888 from 72075186224037890 is reset 2025-07-08T12:00:04.606796Z node 3 :TX_DATASHARD ERROR: [DistEraser] [3:930:2731] Reply: txId# 281474976715661, status# SHARD_UNKNOWN, error# Tx state unknown: reason# lost pipe while waiting for reply (plan), txId# 281474976715661, shard# 72075186224037888 2025-07-08T12:00:04.607020Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2025-07-08T12:00:04.607053Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037889 2025-07-08T12:00:04.607080Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-07-08T12:00:04.607089Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-07-08T12:00:04.607099Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 1 2025-07-08T12:00:04.607107Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-07-08T12:00:04.607172Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [3:924:2726], serverId# [3:925:2727], sessionId# [0:0:0] 2025-07-08T12:00:04.622684Z node 3 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [3:950:2744] 2025-07-08T12:00:04.622749Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:04.623124Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:04.623394Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:04.623611Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T12:00:04.623621Z node 3 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T12:00:04.623627Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T12:00:04.623682Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:04.623759Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:04.623769Z node 3 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [3:965:2744] in generation 2 2025-07-08T12:00:04.637208Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:04.637258Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037888 2025-07-08T12:00:04.637287Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-07-08T12:00:04.637355Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [3:968:2752] 2025-07-08T12:00:04.637360Z node 3 :TX_DATASHARD DEBUG: Trying to activate 
change sender: at tablet: 72075186224037888 2025-07-08T12:00:04.637366Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-07-08T12:00:04.637372Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:04.637437Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Execute 2025-07-08T12:00:04.637461Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Complete 2025-07-08T12:00:04.637747Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T12:00:04.637770Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T12:00:04.637789Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1001 2025-07-08T12:00:04.637795Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:04.637835Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:04.637853Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:04.637860Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-07-08T12:00:04.637869Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 1 2025-07-08T12:00:04.637874Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:04.637891Z node 3 :TX_DATASHARD DEBUG: Start TTxProgressResendRS at tablet 72075186224037888 2025-07-08T12:00:04.637904Z node 3 :TX_DATASHARD INFO: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715660 2025-07-08T12:00:04.637911Z node 3 :TX_DATASHARD DEBUG: Send RS 1 at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715660 2025-07-08T12:00:04.637953Z node 3 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715660 2025-07-08T12:00:04.637966Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 1001 txid# 281474976715660 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-07-08T12:00:04.637974Z node 3 :TX_DATASHARD NOTICE: Outdated readset for 1001:281474976715660 at 72075186224037889 2025-07-08T12:00:04.637983Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-07-08T12:00:04.637989Z node 3 :TX_DATASHARD DEBUG: Send RS Ack at 72075186224037889 {TEvReadSet step# 1001 txid# 281474976715660 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-07-08T12:00:04.638002Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 1000 next step 1001 2025-07-08T12:00:04.638011Z node 3 :TX_DATASHARD DEBUG: Start TTxProgressResendRS at tablet 72075186224037888 2025-07-08T12:00:04.638015Z node 3 :TX_DATASHARD INFO: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037890 txId 281474976715660 2025-07-08T12:00:04.638030Z node 3 :TX_DATASHARD DEBUG: Send RS 2 at 72075186224037888 from 72075186224037888 to 72075186224037890 txId 
281474976715660 2025-07-08T12:00:04.638049Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715660 2025-07-08T12:00:04.638076Z node 3 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037888 dest 72075186224037890 producer 72075186224037888 txId 281474976715660 2025-07-08T12:00:04.638083Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 1001 txid# 281474976715660 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2025-07-08T12:00:04.638087Z node 3 :TX_DATASHARD NOTICE: Outdated readset for 1001:281474976715660 at 72075186224037890 2025-07-08T12:00:04.638092Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-07-08T12:00:04.638097Z node 3 :TX_DATASHARD DEBUG: Send RS Ack at 72075186224037890 {TEvReadSet step# 1001 txid# 281474976715660 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2025-07-08T12:00:04.638117Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715660 >> TSchemeShardTest::AlterTableSettings [GOOD] >> TSchemeShardTest::AssignBlockStoreVolume >> TSchemeShardTest::DropPQ [GOOD] >> TSchemeShardTest::DropBlockStoreVolume >> DistributedEraseTests::ConditionalEraseRowsShouldNotErase >> TSchemeShardTest::TopicMeteringMode [GOOD] >> TSchemeShardTest::Restart >> Viewer::JsonAutocompleteSimilarDatabaseNamePOST [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNameLowerCase |66.5%| [TA] $(B)/ydb/core/kqp/ut/spilling/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTest::DropPQFail [GOOD] >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true [GOOD] >> TSchemeShardTest::AlterTableAndConcurrentSplit >> TSchemeShardTest::DropPQAbort >> TSchemeShardTest::CreateIndexedTableAndForceDrop [GOOD] >> TSchemeShardTest::CreateIndexedTableAndForceDropSimultaneously >> Viewer::SelectStringWithBase64Encoding [GOOD] >> Viewer::QueryExecuteScript |66.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |66.5%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/spilling/test-results/unittest/{meta.json ... 
results_accumulator.log} |66.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |66.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk >> TxUsage::WriteToTopic_Demo_18_RestartBeforeCommit [GOOD] >> TSchemeShardTest::DropBlockStoreVolume [GOOD] >> TSchemeShardTest::DropBlockStoreVolumeWithNonReplicatedPartitions >> TSchemeShardTest::AssignBlockStoreVolume [GOOD] >> TSchemeShardTest::AssignBlockStoreVolumeDuringAlter >> TSchemeShardTest::ConsistentCopyTableAwait [GOOD] >> TSchemeShardTest::ConsistentCopyTableRejects >> KqpScanArrowInChanels::AggregateNoColumn >> EraseRowsTests::ConditionalEraseRowsShouldNotEraseModifiedRows [GOOD] >> EraseRowsTests::EraseRowsFromReplicatedTable >> TSchemeShardTest::CreateIndexedTableAndForceDropSimultaneously [GOOD] >> TSchemeShardTest::CreateTableWithUniformPartitioning >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit >> KqpScanArrowFormat::AggregateCountStar >> TSchemeShardTest::Restart [GOOD] >> TSchemeShardTest::SchemeErrors >> TSchemeShardTest::AssignBlockStoreVolumeDuringAlter [GOOD] >> TSchemeShardTest::AssignBlockStoreCheckVersionInAlter >> TSchemeShardTest::DropBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::DropBlockStoreVolume2 >> TSchemeShardTest::AlterTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::AlterTable >> Viewer::JsonAutocompleteScheme [GOOD] >> Viewer::JsonAutocompleteEmptyColumns >> TSchemeShardTest::SchemeErrors [GOOD] >> TSchemeShardTest::SerializedCellVec [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldNotUpdate >> TSchemeShardTest::AlterTable [GOOD] >> TSchemeShardTest::AlterTableDropColumnReCreateSplit >> TSchemeShardTest::AssignBlockStoreCheckVersionInAlter [GOOD] >> TSchemeShardTest::AssignBlockStoreCheckFillGenerationInAlter >> TSchemeShardTest::CopyTableWithAlterConfig [GOOD] >> TSchemeShardTest::CopyTableOmitFollowers >> Viewer::JsonAutocompleteSimilarDatabaseNameLowerCase [GOOD] >> Viewer::JsonAutocompleteSchemePOST >> TSchemeShardTest::DropBlockStoreVolume2 [GOOD] >> TSchemeShardTest::DropBlockStoreVolumeWithFillGeneration >> KqpScanArrowFormat::AllTypesColumns >> TSchemeShardTest::CreateTableWithUniformPartitioning [GOOD] >> TSchemeShardTest::CreateTableWithSplitBoundaries >> DistributedEraseTests::ConditionalEraseRowsShouldNotErase [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnVariousErrors >> EraseRowsTests::EraseRowsFromReplicatedTable [GOOD] >> Viewer::ServerlessNodesPage [GOOD] >> Viewer::ServerlessWithExclusiveNodes >> TSchemeShardTest::AssignBlockStoreCheckFillGenerationInAlter [GOOD] >> TSchemeShardTest::BlockStoreVolumeLimits >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldNotUpdate [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldUpdate >> TSchemeShardTest::AlterTableDropColumnReCreateSplit [GOOD] >> TSchemeShardTest::AlterTableById >> TSchemeShardTest::ConsistentCopyTableRejects [GOOD] >> TSchemeShardTest::ConsistentCopyTableToDeletedPath >> KqpScanArrowFormat::SingleKey >> TSchemeShardTest::CopyTableOmitFollowers [GOOD] >> TSchemeShardTest::CopyTableForBackup >> TSchemeShardTest::ParallelModifying [GOOD] >> TSchemeShardTest::PQGroupExplicitChannels >> TSchemeShardTest::DropBlockStoreVolumeWithFillGeneration [GOOD] >> TSchemeShardTest::CreateTableWithSplitBoundaries [GOOD] >> TSchemeShardTest::CreateWithIntermediateDirs >> TSchemeShardTest::CreateTableWithConfig >> 
TSchemeShardTest::AlterTableById [GOOD] >> TSchemeShardTest::AlterTableConfig >> DataShardVolatile::DistributedWriteThenBulkUpsertWithCdc [GOOD] >> DataShardVolatile::DistributedWriteLostPlanThenDrop >> KqpScanArrowInChanels::AggregateNoColumn [GOOD] >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldUpdate [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonStorageConfig >> KqpScanArrowFormat::AggregateCountStar [GOOD] >> KqpScanArrowFormat::AggregateByColumn ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::EraseRowsFromReplicatedTable [GOOD] Test command err: 2025-07-08T12:00:05.418212Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0011e4/r3tmp/tmpYRYzL1/pdisk_1.dat 2025-07-08T12:00:05.582112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:00:05.604366Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:05.638592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:05.638641Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:05.649499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:05.729468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:05.748282Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:603:2519] 2025-07-08T12:00:05.748377Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:05.784211Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:05.784262Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:05.784435Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T12:00:05.784445Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T12:00:05.784452Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T12:00:05.784531Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:05.784551Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:05.784565Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:619:2519] in generation 1 2025-07-08T12:00:05.797226Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:05.809539Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T12:00:05.809629Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:05.809660Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:621:2529] 2025-07-08T12:00:05.809666Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at 
tablet: 72075186224037888 2025-07-08T12:00:05.809672Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T12:00:05.809678Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:05.809869Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T12:00:05.809900Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T12:00:05.809918Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:05.809924Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:05.809933Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:05.809938Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:05.810057Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:599:2516], serverId# [1:610:2523], sessionId# [0:0:0] 2025-07-08T12:00:05.810099Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:05.810164Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T12:00:05.810182Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T12:00:05.810487Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:05.821228Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:05.821283Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T12:00:06.009873Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:640:2542], serverId# [1:642:2544], sessionId# [0:0:0] 2025-07-08T12:00:06.010893Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-07-08T12:00:06.010913Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:06.011058Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:06.011069Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-07-08T12:00:06.011081Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-07-08T12:00:06.011162Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-07-08T12:00:06.011200Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-07-08T12:00:06.011305Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:06.011320Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 
2025-07-08T12:00:06.011711Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-07-08T12:00:06.011824Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:06.012065Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-07-08T12:00:06.012073Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:06.012245Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-07-08T12:00:06.012256Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:06.012464Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:06.012474Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:06.012480Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-07-08T12:00:06.012499Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:361:2356], exec latency: 0 ms, propose latency: 0 ms 2025-07-08T12:00:06.012510Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-07-08T12:00:06.012521Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:06.017653Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:06.017951Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-07-08T12:00:06.017964Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-07-08T12:00:06.018119Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-07-08T12:00:06.202390Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715658. Ctx: { TraceId: 01jzmygmc3e33ye29gt39tzed0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjYxZTc3ZGItNWRlMzFiMWQtNzVmMjM3M2YtZTYzOGZkNWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:06.203671Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:685:2577], serverId# [1:686:2578], sessionId# [0:0:0] 2025-07-08T12:00:06.203766Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:06.227006Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:06.227075Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:06.228356Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:693:2584], serverId# [1:694:2585], sessionId# [0:0:0] 2025-07-08T12:00:06.284726Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715659. 
Ctx: { TraceId: 01jzmygmjn3wvm33wdemper7h7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQwMGZiMGItMjEwZGJjZS01ZWU5NDg3My03YmY0MmU1Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:06.284922Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:06.313234Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:06.313303Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:06.313920Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-07-08T12:00:06.324404Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-07-08T12:00:06.324441Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:06.324532Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-07-08T12:00:06.324542Z node 1 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-07-08T12:00:06.324605Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:06.324615Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:06.324626Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:06.324642Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:06.324684Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:693:2584], serverId# [1:694:2585], sessionId# [0:0:0] 2025-07-08T12:00:06.324994Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:06.325118Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:06.325179Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:06.325185Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:06.325193Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715660] at 72075186224037888 for WaitForStreamClearance 2025-07-08T12:00:06.325244Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:06.325252Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:06.325400Z node 1 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715660, MessageQuota: 1 2025-07-08T12:00:06.325462Z node 1 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715660, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-07-08T12:00:06.325485Z node 1 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715660, PendingAcks: 0 2025-07-08T12:00:06.325491Z node 1 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715660, MessageQuota: 0 2025-07-08T12:00:06.325591Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-07-08T12:00:06.325598Z node 1 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715660, at: 72075186224037888 2025-07-08T12:00:06.325626Z node 1 :TX_DATASHARD 
DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:06.325631Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:06.325637Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715660] at 72075186224037888 for ReadTableScan 2025-07-08T12:00:06.325673Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:06.325681Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:06.325689Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:07.105141Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0011e4/r3tmp/tmpzMZLTF/pdisk_1.dat 2025-07-08T12:00:07.244056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:00:07.261584Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:07.294992Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:07.295035Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:07.305796Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:07.386805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:07.402178Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:604:2520] 2025-07-08T12:00:07.402228Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:07.409413Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:07.409464Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:07.409635Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T12:00:07.409644Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T12:00:07.409652Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T12:00:07.409705Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:07.409734Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:07.409748Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:620:2520] in generation 1 2025-07-08T12:00:07.420270Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:07.420309Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T12:00:07.420363Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:07.420390Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:622:2530] 2025-07-08T12:00:07.420395Z node 2 :TX_DATASHARD DEBUG: 
Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:07.420400Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T12:00:07.420405Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:07.420540Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T12:00:07.420566Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T12:00:07.420679Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:07.420687Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:07.420696Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:07.420701Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:07.420714Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:600:2517], serverId# [2:611:2524], sessionId# [0:0:0] 2025-07-08T12:00:07.420744Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:07.420814Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T12:00:07.420832Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T12:00:07.421219Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:07.431596Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:07.431648Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T12:00:07.593787Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:640:2542], serverId# [2:642:2544], sessionId# [0:0:0] 2025-07-08T12:00:07.593987Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-07-08T12:00:07.594001Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:07.594042Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:07.594051Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-07-08T12:00:07.594063Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-07-08T12:00:07.594142Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-07-08T12:00:07.594176Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-07-08T12:00:07.594347Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:07.594365Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, 
LocalPathId: 2] schema version# 1 2025-07-08T12:00:07.594478Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-07-08T12:00:07.594568Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:07.594926Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-07-08T12:00:07.594937Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:07.595046Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-07-08T12:00:07.595055Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:07.595233Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:07.595241Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:07.595247Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-07-08T12:00:07.595265Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:362:2357], exec latency: 0 ms, propose latency: 0 ms 2025-07-08T12:00:07.595276Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-07-08T12:00:07.595286Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:07.595461Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:07.595877Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-07-08T12:00:07.595889Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-07-08T12:00:07.595950Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-07-08T12:00:07.596783Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:676:2570], serverId# [2:677:2571], sessionId# [0:0:0] 2025-07-08T12:00:07.596810Z node 2 :TX_DATASHARD NOTICE: Rejecting erase request on datashard: tablet# 72075186224037888, error# Can't execute erase at replicated table 2025-07-08T12:00:07.596843Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:676:2570], serverId# [2:677:2571], sessionId# [0:0:0] >> TSchemeShardTest::PQGroupExplicitChannels [GOOD] >> TSchemeShardTest::ReadOnlyMode >> TSchemeShardTest::UpdateChannelsBindingSolomonStorageConfig [GOOD] >> TSchemeShardTest::RejectAlterSolomon >> TSchemeShardTest::CreateTableWithConfig [GOOD] >> TSchemeShardTest::BlockStoreVolumeLimits [GOOD] >> TSchemeShardTest::CreateTableWithNamedConfig >> TSchemeShardTest::BlockStoreNonreplVolumeLimits >> TSchemeShardTest::CreateWithIntermediateDirs [GOOD] >> TSchemeShardTest::DocumentApiVersion >> TSchemeShardTest::ConsistentCopyTableToDeletedPath [GOOD] >> TSchemeShardTest::CopyIndexedTable >> Viewer::JsonAutocompleteEmptyColumns [GOOD] >> Viewer::JsonAutocompleteColumnsPOST >> 
Viewer::JsonAutocompleteSchemePOST [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit >> TSchemeShardTest::RejectAlterSolomon [GOOD] >> TSchemeShardTest::SimultaneousDropForceDrop >> Viewer::Cluster10000Tablets [GOOD] >> Viewer::FuzzySearcherLimit1OutOf4 [GOOD] >> Viewer::FuzzySearcherLimit2OutOf4 [GOOD] >> Viewer::ExecuteQueryDoesntExecuteSchemeOperationsInsideTransation >> TSchemeShardTest::AlterTableConfig [GOOD] >> TSchemeShardTest::AlterTableCompactionPolicy >> TSchemeShardTest::CreateTableWithNamedConfig [GOOD] >> TSchemeShardTest::CreateTableWithUnknownNamedConfig >> KqpScanArrowFormat::SingleKey [GOOD] >> KqpScanArrowFormat::JoinWithParams >> TSchemeShardTest::DocumentApiVersion [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Dir >> TSchemeShardTest::ReadOnlyMode [GOOD] >> TSchemeShardTest::PathErrors >> TSchemeShardTest::SimultaneousDropForceDrop [GOOD] >> TSchemeShardTest::RejectSystemViewPath >> KqpScanArrowFormat::AllTypesColumns [GOOD] >> KqpScanArrowFormat::AllTypesColumnsCellvec >> TSchemeShardTest::BlockStoreNonreplVolumeLimits [GOOD] >> TSchemeShardTest::BlockStoreSystemVolumeLimits >> TSchemeShardTest::CreateTableWithUnknownNamedConfig [GOOD] >> TSchemeShardTest::CreatePersQueueGroup >> TSchemeShardTest::RejectSystemViewPath [GOOD] >> TSchemeShardTest::SplitKey [GOOD] >> TSchemeShardTest::SplitAlterCopy >> TSchemeShardTest::CopyIndexedTable [GOOD] >> TSchemeShardTest::CopyTable >> TSchemeShardTest::PathErrors [GOOD] >> TSchemeShardTest::NestedDirs >> TSchemeShardTest::DisablePublicationsOfDropping_Dir [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Table >> TSchemeShardTest::AlterTableCompactionPolicy [GOOD] >> TSchemeShardTest::AlterPersQueueGroup >> Viewer::ServerlessWithExclusiveNodes [GOOD] >> Viewer::SharedDoesntShowExclusiveNodes >> KqpScanArrowFormat::AggregateByColumn [GOOD] >> KqpScanArrowFormat::AggregateNoColumn ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::JsonAutocompleteSchemePOST [GOOD] Test command err: 2025-07-08T12:00:02.455792Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 24646, node 1 TClient is connected to server localhost:25733 json result: {"Success":true,"Result":{"Total":5,"Entities":[{"Name":"/Root/MyDatabase","Type":"ext_sub_domain"},{"Name":"/Root/TestDatabase","Type":"ext_sub_domain"},{"Name":"/Root/test","Type":"ext_sub_domain"},{"Name":"/Root/slice","Type":"ext_sub_domain"},{"Name":"/Root/qwerty","Type":"ext_sub_domain"}]},"Version":2} 2025-07-08T12:00:04.010738Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 32177, node 2 TClient is connected to server localhost:32251 json result: {"Success":true,"Result":{"Total":2,"Entities":[{"Name":"/Root/MyDatabase","Type":"ext_sub_domain"},{"Name":"/Root/TestDatabase","Type":"ext_sub_domain"}]},"Version":2} 2025-07-08T12:00:05.416234Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 16352, node 3 TClient is connected to server localhost:19739 json result: {"Success":true,"Result":{"Total":2,"Entities":[{"Name":"/Root/MyDatabase","Type":"ext_sub_domain"},{"Name":"/Root/TestDatabase","Type":"ext_sub_domain"}]},"Version":2} 2025-07-08T12:00:06.844929Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 6613, node 4 TClient is connected to server localhost:4100 json result: {"Success":true,"Result":{"Total":2,"Entities":[{"Name":"/Root/MyDatabase","Type":"ext_sub_domain"},{"Name":"/Root/TestDatabase","Type":"ext_sub_domain"}]},"Version":2} 2025-07-08T12:00:08.374210Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 18845, node 5 TClient is connected to server localhost:26732 json result: {"Success":true,"Result":{"Total":3,"Entities":[{"Name":"clients","Type":"table"},{"Name":"orders","Type":"table"},{"Name":"products","Type":"table"}]},"Version":2} >> DataShardVolatile::DistributedWriteLostPlanThenDrop [GOOD] >> DataShardVolatile::DistributedWriteLostPlanThenSplit >> TSchemeShardTest::BlockStoreSystemVolumeLimits [GOOD] >> TSchemeShardTest::AlterTableWithCompactionStrategies >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps [GOOD] >> KqpScanArrowInChanels::AggregateWithFunction >> Viewer::QueryExecuteScript [GOOD] >> Viewer::Plan2SvgOK >> TSchemeShardTest::SplitAlterCopy [GOOD] >> TSchemeShardTest::TopicReserveSize >> TSchemeShardTest::NestedDirs [GOOD] >> TSchemeShardTest::PreserveColumnOrder |66.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |66.5%| [LD] {RESULT} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |66.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut >> Viewer::ExecuteQueryDoesntExecuteSchemeOperationsInsideTransation [GOOD] >> Viewer::FloatPointJsonQuery >> KqpScanArrowFormat::JoinWithParams [GOOD] >> KqpScanArrowInChanels::AggregateCountStar >> TSchemeShardTest::CopyTable [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Table [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_IndexedTable >> TSchemeShardTest::CopyTableAndConcurrentChanges >> KqpScanArrowFormat::AllTypesColumnsCellvec [GOOD] >> KqpScanArrowFormat::AggregateNoColumnNoRemaps >> Viewer::JsonAutocompleteColumnsPOST [GOOD] >> TSchemeShardTest::CreatePersQueueGroup [GOOD] >> TSchemeShardTest::CreatePersQueueGroupWithKeySchema >> TSchemeShardTest::AlterTableWithCompactionStrategies [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-false >> TSchemeShardTest::PreserveColumnOrder [GOOD] >> TSchemeShardTest::TopicReserveSize [GOOD] >> TSchemeShardTest::TopicWithAutopartitioningReserveSize |66.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::JsonAutocompleteColumnsPOST [GOOD] Test command err: 
2025-07-08T12:00:03.114830Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 9979, node 1 TClient is connected to server localhost:19203 json result: {"Success":true,"Result":{"Total":5,"Entities":[{"Name":"/Root/test","Type":"ext_sub_domain"},{"Name":"/Root/slice","Type":"ext_sub_domain"},{"Name":"/Root/qwerty","Type":"ext_sub_domain"},{"Name":"/Root/MyDatabase","Type":"ext_sub_domain"},{"Name":"/Root/TestDatabase","Type":"ext_sub_domain"}]},"Version":2} 2025-07-08T12:00:04.505270Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 28614, node 2 TClient is connected to server localhost:15198 json result: {"Success":true,"Result":{"Total":5,"Entities":[{"Name":"/Root/MyDatabase","Type":"ext_sub_domain"},{"Name":"/Root/TestDatabase","Type":"ext_sub_domain"},{"Name":"/Root/test","Type":"ext_sub_domain"},{"Name":"/Root/slice","Type":"ext_sub_domain"},{"Name":"/Root/qwerty","Type":"ext_sub_domain"}]},"Version":2} 2025-07-08T12:00:06.256350Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 13438, node 3 TClient is connected to server localhost:65016 json result: {"Success":true,"Result":{"Total":3,"Entities":[{"Name":"clients","Type":"table"},{"Name":"orders","Type":"table"},{"Name":"products","Type":"table"}]},"Version":2} 2025-07-08T12:00:07.933274Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 25584, node 4 TClient is connected to server localhost:29082 json result: {"Success":true,"Result":{"Total":3,"Entities":[{"Name":"id","Type":"column","Parent":"orders"},{"Name":"name","Type":"column","Parent":"orders"},{"Name":"description","Type":"column","Parent":"orders"}]},"Version":2} 2025-07-08T12:00:10.176529Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 16871, node 5 TClient is connected to server localhost:5442 json result: {"Success":true,"Result":{"Total":6,"Entities":[{"Name":"name","Type":"column","Parent":"orders"},{"Name":"name","Type":"column","Parent":"products"},{"Name":"id","Type":"column","Parent":"orders"},{"Name":"id","Type":"column","Parent":"products"},{"Name":"description","Type":"column","Parent":"orders"},{"Name":"description","Type":"column","Parent":"products"}]},"Version":2} >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit [GOOD] >> Viewer::FloatPointJsonQuery [GOOD] >> Viewer::AuthorizeYdbTokenWithDatabaseAttributes >> TSchemeShardTest::CreatePersQueueGroupWithKeySchema [GOOD] >> 
TSchemeShardTest::CreateTableWithCompactionStrategies |66.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |66.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |66.6%| [LD] {RESULT} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut >> KqpScanArrowInChanels::AggregateWithFunction [GOOD] >> KqpScanArrowInChanels::AggregateEmptySum >> TSchemeShardTest::CopyTableAndConcurrentChanges [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentSplit >> TSchemeShardTest::DisablePublicationsOfDropping_IndexedTable [GOOD] >> Viewer::Plan2SvgOK [GOOD] >> Viewer::Plan2SvgBad >> TSchemeShardTest::DisablePublicationsOfDropping_Pq >> TSchemeShardTest::BackupBackupCollection-WithIncremental-false [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true >> TSchemeShardTest::TopicWithAutopartitioningReserveSize [GOOD] >> KqpScanArrowInChanels::AggregateCountStar [GOOD] >> KqpScanArrowInChanels::AggregateByColumn >> KqpScanArrowFormat::AggregateNoColumn [GOOD] >> KqpScanArrowFormat::AggregateEmptySum ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::PreserveColumnOrder [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T11:59:59.401605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:59:59.401647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:59.401654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:59:59.401659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:59:59.401675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:59:59.401679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:59:59.401689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:59.401701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:59:59.401765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:59.414232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T11:59:59.414255Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:59.417949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:59.418009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:59:59.418057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:59:59.419560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:59:59.419635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:59:59.419730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:59.419916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:59:59.420767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:59.420824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:59:59.421107Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:59.421121Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:59.421139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:59:59.421147Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:59.421151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:59:59.421179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T11:59:59.422485Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T11:59:59.440408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:59.440504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:59.440577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:59:59.440678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:59:59.440691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:59.441517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:59.441547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:59:59.441596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:59.441606Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:59:59.441611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:59:59.441616Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:59:59.441988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:59.441999Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:59.442003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:59:59.442287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:59.442295Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:59.442302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:59.442309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:59:59.442805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:59:59.443172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:59:59.443215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:59:59.443385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:59.443406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:59.443415Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:59.443478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:59:59.443484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:59.443514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:59:59.443524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:59:59.443909Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-07-08T11:59:59.443917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:59.443964Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:59.443969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:59:59.443980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:59.443986Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:59:59.443998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:59.444002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:59.444006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:59.444009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:59.444014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:59:59.444019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:59.444024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T11:59:59.444027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T11:59:59.444048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T11:59:59.444054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T11:59:59.444058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T11:59:59.444420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T11:59:59.444436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
MESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-07-08T12:00:10.789482Z node 15 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-07-08T12:00:10.789487Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-07-08T12:00:10.789509Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-07-08T12:00:10.789522Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-07-08T12:00:10.789849Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 360 } } 2025-07-08T12:00:10.789858Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-07-08T12:00:10.789875Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 360 } } 2025-07-08T12:00:10.789887Z node 15 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 360 } } 2025-07-08T12:00:10.797278Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 64424511735 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-07-08T12:00:10.797304Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-07-08T12:00:10.797329Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 64424511735 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-07-08T12:00:10.797338Z node 15 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-07-08T12:00:10.797346Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 308 RawX2: 64424511735 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-07-08T12:00:10.797360Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 
72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:10.797365Z node 15 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-07-08T12:00:10.797370Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-07-08T12:00:10.797377Z node 15 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-07-08T12:00:10.797712Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-07-08T12:00:10.798003Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-07-08T12:00:10.798378Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-07-08T12:00:10.798414Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-07-08T12:00:10.798480Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-07-08T12:00:10.798490Z node 15 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-07-08T12:00:10.798504Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-07-08T12:00:10.798509Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-07-08T12:00:10.798513Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-07-08T12:00:10.798516Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-07-08T12:00:10.798521Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-07-08T12:00:10.798536Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [15:334:2313] message: TxId: 101 2025-07-08T12:00:10.798544Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-07-08T12:00:10.798549Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-07-08T12:00:10.798553Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-07-08T12:00:10.798575Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-07-08T12:00:10.798995Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-07-08T12:00:10.799008Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [15:335:2314] TestWaitNotification: OK eventTxId 101 2025-07-08T12:00:10.799118Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:10.799197Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 87us result status StatusSuccess 2025-07-08T12:00:10.799357Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "col01" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col02" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "col03" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "col04" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } Columns { Name: "col05" Type: "Utf8" TypeId: 4608 Id: 5 NotNull: false IsBuildInProgress: false } Columns { Name: "col06" Type: "Utf8" TypeId: 4608 Id: 6 NotNull: false IsBuildInProgress: false } Columns { Name: "col07" Type: "Utf8" TypeId: 4608 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "col08" Type: "Utf8" TypeId: 4608 Id: 8 NotNull: false IsBuildInProgress: false } Columns { Name: "col09" Type: "Utf8" TypeId: 4608 Id: 9 NotNull: false IsBuildInProgress: false } Columns { Name: "col10" Type: "Utf8" TypeId: 4608 Id: 10 NotNull: false IsBuildInProgress: false } Columns { Name: "col11" Type: "Utf8" TypeId: 4608 Id: 11 NotNull: false IsBuildInProgress: false } Columns { Name: "col12" Type: "Utf8" TypeId: 4608 Id: 12 NotNull: false IsBuildInProgress: false } Columns { Name: "col13" Type: "Utf8" TypeId: 4608 Id: 13 NotNull: false IsBuildInProgress: false } Columns { Name: "col14" Type: "Utf8" TypeId: 4608 Id: 14 NotNull: false IsBuildInProgress: false } Columns { Name: "col15" Type: "Utf8" TypeId: 4608 Id: 15 NotNull: false IsBuildInProgress: false } Columns { Name: "col16" Type: "Utf8" TypeId: 4608 Id: 16 NotNull: false IsBuildInProgress: false } Columns { Name: "col17" Type: "Utf8" TypeId: 4608 Id: 17 NotNull: false IsBuildInProgress: false } Columns { Name: "col18" Type: "Utf8" TypeId: 4608 Id: 18 NotNull: false IsBuildInProgress: false } Columns { Name: "col19" Type: "Utf8" TypeId: 4608 Id: 19 NotNull: false IsBuildInProgress: false } Columns { Name: "col20" Type: "Utf8" TypeId: 4608 Id: 20 NotNull: false IsBuildInProgress: false } Columns { Name: "col21" Type: "Utf8" TypeId: 4608 Id: 21 NotNull: false IsBuildInProgress: false } Columns { Name: "col22" Type: "Utf8" TypeId: 4608 Id: 22 NotNull: false IsBuildInProgress: false } Columns { Name: "col23" Type: "Utf8" TypeId: 4608 Id: 23 NotNull: false IsBuildInProgress: false } Columns { Name: "col24" Type: "Utf8" TypeId: 4608 Id: 24 NotNull: false IsBuildInProgress: false } Columns { Name: "col25" Type: "Utf8" TypeId: 4608 Id: 25 NotNull: false IsBuildInProgress: false } Columns { Name: "col26" Type: "Utf8" TypeId: 4608 Id: 26 NotNull: false IsBuildInProgress: false } Columns { Name: "col27" Type: "Utf8" TypeId: 4608 Id: 27 NotNull: false IsBuildInProgress: false } Columns { Name: "col28" Type: "Utf8" TypeId: 4608 Id: 28 NotNull: false IsBuildInProgress: false } Columns { Name: "col29" Type: "Utf8" TypeId: 4608 Id: 29 NotNull: false IsBuildInProgress: false } KeyColumnNames: "col01" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats 
{ DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DataShardVolatile::DistributedWriteLostPlanThenSplit [GOOD] >> DataShardVolatile::DistributedOutOfOrderFollowerConsistency >> TSchemeShardTest::CreateTableWithCompactionStrategies [GOOD] >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas >> Viewer::SharedDoesntShowExclusiveNodes [GOOD] >> Viewer::ServerlessWithExclusiveNodesCheckTable >> TSchemeShardTest::DisablePublicationsOfDropping_Pq [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Solomon ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit [GOOD] Test command err: 2025-07-08T12:00:06.554662Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0011d7/r3tmp/tmp5gKqvV/pdisk_1.dat 2025-07-08T12:00:06.700220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:00:06.719891Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:06.752824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:06.752870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:06.768470Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:06.845286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:06.869429Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:625:2535] 2025-07-08T12:00:06.869531Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:06.879166Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:06.879216Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:06.879382Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 
2025-07-08T12:00:06.879390Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T12:00:06.879397Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T12:00:06.879469Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:06.879510Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:06.879522Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:652:2535] in generation 1 2025-07-08T12:00:06.879959Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:627:2537] 2025-07-08T12:00:06.879996Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:06.881626Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:06.881705Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:631:2539] 2025-07-08T12:00:06.881731Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:06.882647Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:06.882783Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-07-08T12:00:06.882791Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-07-08T12:00:06.882797Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-07-08T12:00:06.882837Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:06.882891Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:06.882901Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:675:2537] in generation 1 2025-07-08T12:00:06.882960Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:06.882971Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:06.883057Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-07-08T12:00:06.883062Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-07-08T12:00:06.883067Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-07-08T12:00:06.883085Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:06.883098Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:06.883105Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:676:2539] in generation 1 2025-07-08T12:00:06.893427Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:06.897599Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T12:00:06.897678Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:06.897707Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:680:2566] 2025-07-08T12:00:06.897713Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:06.897719Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T12:00:06.897725Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:06.897761Z node 1 :TX_DATASHARD DEBUG: 
TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:06.897769Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-07-08T12:00:06.897778Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:06.897786Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:681:2567] 2025-07-08T12:00:06.897789Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-07-08T12:00:06.897792Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-07-08T12:00:06.897795Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-07-08T12:00:06.897889Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:06.897894Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-07-08T12:00:06.897903Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:06.897910Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:682:2568] 2025-07-08T12:00:06.897914Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-07-08T12:00:06.897917Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-07-08T12:00:06.897920Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-07-08T12:00:06.898040Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T12:00:06.898069Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T12:00:06.898075Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-07-08T12:00:06.898083Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-07-08T12:00:06.898097Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:06.898104Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:06.898114Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:06.898118Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:06.898124Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-07-08T12:00:06.898128Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:06.898131Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-07-08T12:00:06.898135Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-07-08T12:00:06.898140Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-07-08T12:00:06.898150Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-07-08T12:00:06.898168Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:614:2530], serverId# [1:640:2543], sessionId# [0:0:0] 2025-07-08T12:00:06.898174Z 
node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:615:2531], serverId# [1:648:2549], sessionId# [0:0:0] 2025-07-08T12:00:06.898178Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-07-08T12:00:06.898181Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:06.898184Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-07-08T12:00:06.898188Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-07-08T12:00:06.898236Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:06.898297Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T12:00:06.898323Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T12:00:06.898430Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-07-08T12:00:06.898454Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-07-08T12:00:06.898463Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-07-08T12:00:06.898925Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:06.898938Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-07-08T12:00:06.909238Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:06.909285Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T12:00:06.909472Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-07-08T12:00:06.909481Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-07-08T12:00:06.957270Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:616:2532], serverId# [1:699:2578], sessionId# [0:0:0] 2025-07-08T12:00:06.957360Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-07-08T12:00:06.957428Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 281474976715657 ssId 72057594046644480 seqNo 2:3 2025-07-08T12:00:06.957465Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037890 2025-07-08T12:00:06.957599Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-07-08T12:00:06.969211Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2025-07-08T12:00:06.969267Z ... 
d: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037891 2025-07-08T12:00:11.301924Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2025-07-08T12:00:11.301977Z node 3 :TX_DATASHARD DEBUG: 72075186224037891 ack init split/merge destination OpId 281474976715662 2025-07-08T12:00:11.301987Z node 3 :TX_DATASHARD DEBUG: 72075186224037891 not sending time cast registration request in state SplitDstReceivingSnapshot 2025-07-08T12:00:11.302135Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 ack init split/merge destination OpId 281474976715662 2025-07-08T12:00:11.302142Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 not sending time cast registration request in state SplitDstReceivingSnapshot 2025-07-08T12:00:11.302401Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 received split OpId 281474976715662 at state Ready 2025-07-08T12:00:11.312843Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 starting snapshot for split OpId 281474976715662 2025-07-08T12:00:11.312976Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 CancelReadIterators#0 2025-07-08T12:00:11.313351Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 3, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-07-08T12:00:11.313363Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 3, finished edge# 0, front# 0 2025-07-08T12:00:11.313593Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 4, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-07-08T12:00:11.313601Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 4, finished edge# 0, front# 0 2025-07-08T12:00:11.314088Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 7, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-07-08T12:00:11.314097Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 7, finished edge# 0, front# 0 2025-07-08T12:00:11.314235Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 8, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-07-08T12:00:11.314240Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 8, finished edge# 0, front# 0 2025-07-08T12:00:11.314379Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-07-08T12:00:11.314386Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 1001, finished edge# 0, front# 0 2025-07-08T12:00:11.314491Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 snapshot complete for split OpId 281474976715662 2025-07-08T12:00:11.314561Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 3 snapshot size is 12 total snapshot size is 12 for split OpId 281474976715662 2025-07-08T12:00:11.314572Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 4 snapshot size is 12 total snapshot size is 24 for split OpId 281474976715662 2025-07-08T12:00:11.314576Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 7 snapshot size is 12 total snapshot size is 36 for split OpId 281474976715662 2025-07-08T12:00:11.314580Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 8 snapshot size is 12 total snapshot size is 48 for split OpId 281474976715662 
2025-07-08T12:00:11.314613Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 1001 snapshot size is 146 total snapshot size is 194 for split OpId 281474976715662 2025-07-08T12:00:11.314661Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 3 snapshot size is 12 total snapshot size is 206 for split OpId 281474976715662 2025-07-08T12:00:11.314666Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 4 snapshot size is 12 total snapshot size is 218 for split OpId 281474976715662 2025-07-08T12:00:11.314671Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 7 snapshot size is 12 total snapshot size is 230 for split OpId 281474976715662 2025-07-08T12:00:11.314675Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 8 snapshot size is 12 total snapshot size is 242 for split OpId 281474976715662 2025-07-08T12:00:11.314688Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 1001 snapshot size is 155 total snapshot size is 397 for split OpId 281474976715662 2025-07-08T12:00:11.321097Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 Sending snapshots from src for split OpId 281474976715662 2025-07-08T12:00:11.321192Z node 3 :TX_DATASHARD DEBUG: Sending snapshot for split opId 281474976715662 from datashard 72075186224037889 to datashard 72075186224037892 size 221 2025-07-08T12:00:11.321218Z node 3 :TX_DATASHARD DEBUG: Sending snapshot for split opId 281474976715662 from datashard 72075186224037889 to datashard 72075186224037891 size 215 2025-07-08T12:00:11.321317Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [3:1061:2828], serverId# [3:1062:2829], sessionId# [0:0:0] 2025-07-08T12:00:11.321327Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037892, clientId# [3:1060:2827], serverId# [3:1063:2830], sessionId# [0:0:0] 2025-07-08T12:00:11.321367Z node 3 :TX_DATASHARD DEBUG: 72075186224037891 Received snapshot for split/merge TxId 281474976715662 from tabletId 72075186224037889 2025-07-08T12:00:11.321515Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 Received snapshot for split/merge TxId 281474976715662 from tabletId 72075186224037889 2025-07-08T12:00:11.321897Z node 3 :TX_DATASHARD DEBUG: 72075186224037891 ack snapshot OpId 281474976715662 2025-07-08T12:00:11.321919Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037891 2025-07-08T12:00:11.321944Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-07-08T12:00:11.321966Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-07-08T12:00:11.321989Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037891, actorId: [3:1066:2833] 2025-07-08T12:00:11.321994Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037891 2025-07-08T12:00:11.322000Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037891 2025-07-08T12:00:11.322005Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-07-08T12:00:11.322122Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037891 for split OpId 281474976715662 2025-07-08T12:00:11.322260Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037891 time 2000 2025-07-08T12:00:11.322268Z
node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-07-08T12:00:11.322333Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-07-08T12:00:11.322342Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:11.322353Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2025-07-08T12:00:11.322358Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-07-08T12:00:11.322391Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [3:1061:2828], serverId# [3:1062:2829], sessionId# [0:0:0] 2025-07-08T12:00:11.322403Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 ack snapshot OpId 281474976715662 2025-07-08T12:00:11.322410Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037892 2025-07-08T12:00:11.322420Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037892 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-07-08T12:00:11.322429Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-07-08T12:00:11.322436Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037892, actorId: [3:1068:2835] 2025-07-08T12:00:11.322440Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037892 2025-07-08T12:00:11.322443Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037892 2025-07-08T12:00:11.322450Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-07-08T12:00:11.322481Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037892 for split OpId 281474976715662 2025-07-08T12:00:11.322611Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037892 time 2000 2025-07-08T12:00:11.322617Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-07-08T12:00:11.322632Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037891 coordinator 72057594046316545 last step 1000 next step 2000 2025-07-08T12:00:11.322638Z node 3 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037891: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-07-08T12:00:11.322649Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-07-08T12:00:11.322653Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:11.322657Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037892 TxInFly 0 2025-07-08T12:00:11.322661Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-07-08T12:00:11.322674Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037892, clientId# [3:1060:2827], serverId# [3:1063:2830], sessionId# [0:0:0] 2025-07-08T12:00:11.322707Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037892 coordinator 72057594046316545 last step 1000 next step 2000 2025-07-08T12:00:11.322712Z node 3 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037892: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-07-08T12:00:11.343717Z node 3 :TX_DATASHARD DEBUG: 
72075186224037889 ack split to schemeshard 281474976715662 2025-07-08T12:00:11.344869Z node 3 :TX_DATASHARD DEBUG: Got TEvSplitPartitioningChanged: opId: 281474976715662, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2025-07-08T12:00:11.345378Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2025-07-08T12:00:11.345397Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037889 2025-07-08T12:00:11.345455Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [3:924:2726], serverId# [3:925:2727], sessionId# [0:0:0] 2025-07-08T12:00:11.345475Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-07-08T12:00:11.345479Z node 3 :TX_DATASHARD INFO: Progress tx at non-ready tablet 72075186224037889 state 5 2025-07-08T12:00:11.345512Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 ack split partitioning changed to schemeshard 281474976715662 2025-07-08T12:00:11.345524Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-07-08T12:00:11.345529Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 >> TS3WrapperTests::GetObject >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas [GOOD] >> TSchemeShardTest::CreateSystemColumn >> Viewer::Plan2SvgBad [GOOD] |66.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |66.6%| [LD] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |66.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut >> TS3WrapperTests::GetObject [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentMerge >> KqpScanArrowFormat::AggregateNoColumnNoRemaps [GOOD] >> KqpScanArrowFormat::AggregateWithFunction >> TSchemeShardTest::DisablePublicationsOfDropping_Solomon [GOOD] >> Viewer::AuthorizeYdbTokenWithDatabaseAttributes [GOOD] >> TSchemeShardTest::AlterPersQueueGroup [GOOD] >> TSchemeShardTest::AlterPersQueueGroupWithKeySchema ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::TopicWithAutopartitioningReserveSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T12:00:04.845609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:00:04.845644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:04.845650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:00:04.845656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 
2025-07-08T12:00:04.845669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:00:04.845673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:04.845681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:04.845693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:00:04.845755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:04.858807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:00:04.858826Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:04.864638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:04.864684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:04.864709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:00:04.865971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:04.866031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:04.866117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:04.866282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:00:04.866981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:04.867027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:00:04.867242Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:04.867251Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:04.867266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:04.867272Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:04.867278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:00:04.867300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:00:04.868510Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T12:00:04.924475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:04.924539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at 
schemeshard: 72057594046678944 2025-07-08T12:00:04.924583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:00:04.924624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:04.924632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:04.929191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:04.929221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:00:04.929264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:04.929274Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:00:04.929279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:00:04.929284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:00:04.929705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:04.929715Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:04.929720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:00:04.929988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:04.929996Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:04.930001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:04.930007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:00:04.930672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:04.931014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:00:04.931046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:00:04.931198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 
2025-07-08T12:00:04.931219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:04.931228Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:04.931289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:00:04.931295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:04.931321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:04.931330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:00:04.931665Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:04.931672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:04.931711Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:04.931716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:00:04.931726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:04.931731Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:00:04.931740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:04.931744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:04.931748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:04.931751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:04.931755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:00:04.931760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:04.931764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:00:04.931768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:00:04.931777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:04.931782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:00:04.931786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:00:04.932178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 
PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:00:04.932190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... poseTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:6, shard: 72075186233409551, left await: 2, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:11.707757Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: false 2025-07-08T12:00:11.707761Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 2 2025-07-08T12:00:11.707798Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409548, partId: 0 2025-07-08T12:00:11.707808Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 5000005 2025-07-08T12:00:11.707813Z node 13 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 5000005 2025-07-08T12:00:11.707818Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 1, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:11.707821Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: false 2025-07-08T12:00:11.707825Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-07-08T12:00:11.707848Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409549, partId: 0 2025-07-08T12:00:11.707858Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 Status: COMPLETE TxId: 104 Step: 5000005 2025-07-08T12:00:11.707864Z node 13 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409549 Status: COMPLETE TxId: 104 Step: 5000005 2025-07-08T12:00:11.707868Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:11.707871Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-07-08T12:00:11.707923Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-07-08T12:00:11.707960Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 
2025-07-08T12:00:11.709948Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-07-08T12:00:11.709994Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-07-08T12:00:11.710011Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-07-08T12:00:11.710026Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-07-08T12:00:11.710037Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-07-08T12:00:11.710054Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-07-08T12:00:11.710089Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:11.710097Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-07-08T12:00:11.710180Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:11.710186Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [13:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-07-08T12:00:11.710265Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-07-08T12:00:11.710273Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-07-08T12:00:11.710290Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-07-08T12:00:11.710295Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-07-08T12:00:11.710300Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-07-08T12:00:11.710302Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-07-08T12:00:11.710307Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-07-08T12:00:11.710313Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-07-08T12:00:11.710319Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-07-08T12:00:11.710323Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-07-08T12:00:11.710370Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 10 2025-07-08T12:00:11.710391Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 1, subscribers: 0 2025-07-08T12:00:11.710396Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-07-08T12:00:11.710523Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-07-08T12:00:11.710538Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-07-08T12:00:11.710542Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-07-08T12:00:11.710547Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-07-08T12:00:11.710551Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-07-08T12:00:11.710566Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-07-08T12:00:11.722942Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-07-08T12:00:11.736406Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-07-08T12:00:11.736426Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-07-08T12:00:11.736539Z node 13 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-07-08T12:00:11.736566Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-07-08T12:00:11.736574Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [13:1456:3257] TestWaitNotification: OK eventTxId 104 2025-07-08T12:00:11.736680Z node 13 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:11.736745Z node 13 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 85us result status StatusSuccess 2025-07-08T12:00:11.736909Z node 13 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 4 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 6 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 7 PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "A" } Status: Inactive ParentPartitionIds: 0 ChildPartitionIds: 3 ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409549 KeyRange { 
FromBound: "A" } Status: Inactive ParentPartitionIds: 0 ChildPartitionIds: 5 } Partitions { PartitionId: 3 TabletId: 72075186233409550 KeyRange { ToBound: "0" } Status: Active ParentPartitionIds: 1 } Partitions { PartitionId: 4 TabletId: 72075186233409551 KeyRange { FromBound: "0" ToBound: "A" } Status: Inactive ParentPartitionIds: 1 ChildPartitionIds: 5 } Partitions { PartitionId: 5 TabletId: 72075186233409552 KeyRange { FromBound: "0" } Status: Active ParentPartitionIds: 2 ParentPartitionIds: 4 } AlterVersion: 4 BalancerTabletID: 72075186233409547 NextPartitionId: 6 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 494 AccountSize: 494 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 6 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetObject [GOOD] Test command err: 2025-07-08T12:00:12.578663Z node 1 :S3_WRAPPER NOTICE: Request: uuid# F716DB57-6B81-462E-8B81-60619461ACAD, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:19362 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C2322669-C85F-4BBD-8C15-20B588FE58BB amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2025-07-08T12:00:12.593599Z node 1 :S3_WRAPPER NOTICE: Response: uuid# F716DB57-6B81-462E-8B81-60619461ACAD, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-07-08T12:00:12.597073Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 4A1FCFA5-F8B5-49C0-8CDA-23B7136F00F2, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:19362 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 50350B8B-6EEB-499C-96B5-834E802675F7 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2025-07-08T12:00:12.600883Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 4A1FCFA5-F8B5-49C0-8CDA-23B7136F00F2, response# GetObjectResult { } >> KqpScanArrowInChanels::AggregateEmptySum [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true [GOOD] >> KqpScanArrowInChanels::AggregateByColumn [GOOD] >> Viewer::JsonStorageListingV1 [GOOD] >> Viewer::JsonStorageListingV1GroupIdFilter >> TSchemeShardTest::CreateSystemColumn [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::Plan2SvgBad [GOOD] Test command err: Data has built Merge = 0.02990678226 Data has merged 2025-07-08T12:00:01.815897Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679628035978257:2093];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:01.816569Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-07-08T12:00:01.921331Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20635, node 1 2025-07-08T12:00:01.965194Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:01.965205Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:01.965208Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:01.965253Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:00:01.965473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:01.965496Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:01.966549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20074 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:02.008245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:02.012124Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:00:02.018887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T12:00:02.021591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:00:02.022769Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-07-08T12:00:02.767187Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:02.796069Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18997, node 2 2025-07-08T12:00:02.816268Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:02.816280Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:02.816282Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:02.816334Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17557 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T12:00:02.869405Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:02.869439Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:02.869832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:02.871355Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:02.871477Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:00:02.880422Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:00:02.881098Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T12:00:02.885414Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-07-08T12:00:03.749969Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7524679639062540733:2166];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-07-08T12:00:03.779568Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:00:03.797671Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63591, node 3 2025-07-08T12:00:03.829944Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:03.829955Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:03.829958Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:03.830007Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21925 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-07-08T12:00:03.867183Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:03.867215Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:03.868073Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:03.873493Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:00:03.881334Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:00:03.892910Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T12:00:03.893780Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:03.894839Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-07-08T12:00:04.783420Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31998, node 4 2025-07-08T12:00:04.821136Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:04.821150Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:04.821152Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:04.821200Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5650 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-07-08T12:00:04.861261Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:04.861288Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStat ... 221182Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:06.221234Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25341 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-07-08T12:00:06.256072Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:06.256100Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:06.256437Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:06.261478Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:06.261743Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:00:06.266491Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:06.267196Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T12:00:06.765381Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-07-08T12:00:06.765399Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-07-08T12:00:06.771698Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:00:06.838865Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-07-08T12:00:06.838881Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-07-08T12:00:06.895229Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-07-08T12:00:06.895246Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-07-08T12:00:06.939149Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-07-08T12:00:06.939166Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-07-08T12:00:06.978863Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-07-08T12:00:06.978879Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-07-08T12:00:07.039015Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-07-08T12:00:07.039031Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-07-08T12:00:07.125183Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-07-08T12:00:07.125208Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-07-08T12:00:07.153133Z node 5 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:00:07.198494Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-07-08T12:00:07.198510Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-07-08T12:00:07.279412Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-07-08T12:00:07.279435Z node 5 :TICKET_PARSER 
INFO: Send TEvAuthorizeTicketResult success 2025-07-08T12:00:07.347889Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-07-08T12:00:07.347905Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-07-08T12:00:07.468717Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-07-08T12:00:07.468736Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-07-08T12:00:07.524614Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-07-08T12:00:07.524631Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-07-08T12:00:07.593492Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-07-08T12:00:07.593512Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-07-08T12:00:07.635188Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-07-08T12:00:07.635206Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-07-08T12:00:07.730742Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-07-08T12:00:07.730757Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-07-08T12:00:07.801623Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-07-08T12:00:07.801640Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-07-08T12:00:07.886823Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-07-08T12:00:07.886839Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-07-08T12:00:07.894347Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715709:1, at schemeshard: 72057594046644480 2025-07-08T12:00:07.896016Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715708:0, at schemeshard: 72057594046644480 2025-07-08T12:00:07.896386Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715707:0, at schemeshard: 72057594046644480 2025-07-08T12:00:09.049505Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-07-08T12:00:09.049525Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-07-08T12:00:09.174888Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-07-08T12:00:09.174905Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-07-08T12:00:09.242151Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-07-08T12:00:09.242173Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-07-08T12:00:09.401728Z node 5 :RPC_REQUEST WARN: Client lost 2025-07-08T12:00:09.401799Z node 5 :KQP_EXECUTER ERROR: ActorId: [5:7524679665233367903:2612] TxId: 281474976715722. Ctx: { TraceId: 01jzmygqmbaxxgxv116859w0da, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MjNhNTM3NGEtNTlmM2JlNzUtNzY0OTEzZjYtNDBlNjk1YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ABORTED: {
: Error: Client lost } 2025-07-08T12:00:09.401848Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=MjNhNTM3NGEtNTlmM2JlNzUtNzY0OTEzZjYtNDBlNjk1YQ==, ActorId: [5:7524679665233367883:2612], ActorState: ExecuteState, TraceId: 01jzmygqmbaxxgxv116859w0da, Create QueryResponse for error on request, msg: 2025-07-08T12:00:09.401963Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976009434, txId: 281474976715721] shutting down 2025-07-08T12:00:09.401992Z node 5 :KQP_COMPUTE ERROR: SelfId: [5:7524679665233367908:2617], TxId: 281474976715722, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jzmygqmbaxxgxv116859w0da. SessionId : ydb://session/3?node_id=5&id=MjNhNTM3NGEtNTlmM2JlNzUtNzY0OTEzZjYtNDBlNjk1YQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : . }. Handle abort execution event from: [5:7524679665233367903:2612], status: ABORTED, reason: {
: Error: Terminate execution } test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-07-08T12:00:10.609073Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:10.621911Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24602, node 6 2025-07-08T12:00:10.658586Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:10.658606Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:10.658608Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:10.658662Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5785 2025-07-08T12:00:10.677464Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:10.677501Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:10.685380Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:11.373734Z node 6 :TICKET_PARSER ERROR: Ticket **** (8C3E2D8D): Could not find correct token validator test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-07-08T12:00:11.905552Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24379, node 7 2025-07-08T12:00:11.938028Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:11.938051Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:11.938053Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:11.938105Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28489 2025-07-08T12:00:11.976769Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:11.976804Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:11.978800Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:12.321256Z node 7 :TICKET_PARSER ERROR: Ticket **** (8C3E2D8D): Could not find correct token validator >> TSchemeShardTest::AlterPersQueueGroupWithKeySchema [GOOD] >> TSchemeShardTest::AlterBlockStoreVolume ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::AuthorizeYdbTokenWithDatabaseAttributes [GOOD] Test command err: 2025-07-08T12:00:07.619800Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:07.620078Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-07-08T12:00:07.727769Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:07.876251Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new 
MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-07-08T12:00:07.905045Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-07-08T12:00:08.040531Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 3685, node 1 TClient is connected to server localhost:17911 2025-07-08T12:00:08.103643Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:08.103664Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:08.103669Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:08.103763Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration Request timer = 0.006872156998 BASE_PERF = 1.719020833 2025-07-08T12:00:09.346216Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7524679662837040755:2071];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:09.349038Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-07-08T12:00:09.386287Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61456, node 3 2025-07-08T12:00:09.445395Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:09.445406Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:09.445409Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:09.445464Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:00:09.457174Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:09.457212Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:09.465429Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3211 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-07-08T12:00:09.504193Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:09.509586Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:00:09.515198Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T12:00:09.516153Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:10.087802Z node 3 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-07-08T12:00:10.087822Z node 3 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-07-08T12:00:10.159199Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NWVmMDZkZjItZjcwYjNiNTktNzIwYmE5ZWItZjhmNmU2MzI=, ActorId: [3:7524679667132008648:2294], ActorState: ExecuteState, TraceId: 01jzmygrba4pt7gq07f7gvr4sf, Create QueryResponse for error on request, msg: Scheme operations cannot be executed inside transaction test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-07-08T12:00:10.633201Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:10.663543Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11527, node 4 2025-07-08T12:00:10.709564Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:10.709580Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:10.709581Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:10.709639Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:00:10.728035Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:10.728067Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:10.733422Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27246 waiting... 
2025-07-08T12:00:10.773810Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T12:00:10.774971Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T12:00:10.777289Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-07-08T12:00:11.137221Z node 4 :GRPC_SERVER DEBUG: Got grpc request# request auth and check internal request, traceId# undef, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# /Root, peer# , grpcInfo# undef, timeout# 9.999980s 2025-07-08T12:00:11.137316Z node 4 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-07-08T12:00:11.137326Z node 4 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-07-08T12:00:11.226270Z node 4 :GRPC_SERVER DEBUG: [0x51e7f665300] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-07-08T12:00:11.226383Z node 4 :GRPC_SERVER DEBUG: [0x51e7f656300] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-07-08T12:00:11.226422Z node 4 :GRPC_SERVER DEBUG: [0x51e7f661200] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-07-08T12:00:11.226451Z node 4 :GRPC_SERVER DEBUG: [0x51e7f65d100] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-07-08T12:00:11.226483Z node 4 :GRPC_SERVER DEBUG: [0x51e7f662b00] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-07-08T12:00:11.226512Z node 4 :GRPC_SERVER DEBUG: [0x51e7f65b800] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-07-08T12:00:11.226541Z node 4 :GRPC_SERVER DEBUG: [0x51e7f659000] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-07-08T12:00:11.226568Z node 4 :GRPC_SERVER DEBUG: [0x51e7f659500] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-07-08T12:00:11.226595Z node 4 :GRPC_SERVER DEBUG: [0x51e7f657c00] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-07-08T12:00:11.226623Z node 4 :GRPC_SERVER DEBUG: [0x51e7f655400] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-07-08T12:00:11.226657Z node 4 :GRPC_SERVER DEBUG: [0x51e7f657700] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-07-08T12:00:11.226682Z node 4 :GRPC_SERVER DEBUG: [0x51e7f643c00] received request Name# LocalEnumerateTablets ok# false data# peer# current inflight# 0 2025-07-08T12:00:11.226715Z node 4 :GRPC_SERVER DEBUG: [0x51e7f612000] received request Name# KeyValue ok# false data# peer# current inflight# 0 2025-07-08T12:00:11.226743Z node 4 :GRPC_SERVER DEBUG: [0x51e7f65ae00] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-07-08T12:00:11.226772Z node 4 :GRPC_SERVER DEBUG: [0x51e7f644100] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-07-08T12:00:11.226800Z node 4 :GRPC_SERVER DEBUG: [0x51e7f65fe00] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-07-08T12:00:11.226824Z node 4 :GRPC_SERVER DEBUG: [0x51e7f65f900] 
received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-07-08T12:00:11.226847Z node 4 :GRPC_SERVER DEBUG: [0x51e7f659a00] received request Name# LocalMKQL ok# false data# peer# current inflight# 0 2025-07-08T12:00:11.226873Z node 4 :GRPC_SERVER DEBUG: [0x51e7f658b00] received request Name# LocalSchemeTx ok# false data# peer# current inflight# 0 2025-07-08T12:00:11.226899Z node 4 :GRPC_SERVER DEBUG: [0x51e7f656800] received request Name# TabletKillRequest ok# false data# peer# current inflight# 0 2025-07-08T12:00:11.226926Z node 4 :GRPC_SERVER DEBUG: [0x51e7ccc2100] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-07-08T12:00:11.226951Z node 4 :GRPC_SERVER DEBUG: [0x51e7f65d600] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 2025-07-08T12:00:11.226982Z node 4 :GRPC_SERVER DEBUG: [0x51e7f612600] received request Name# ExportToYt ok# false data# peer# 2025-07-08T12:00:11.227015Z node 4 :GRPC_SERVER DEBUG: [0x51e7f618c00] received request Name# ExportToS3 ok# false data# peer# 2025-07-08T12:00:11.227043Z node 4 :GRPC_SERVER DEBUG: [0x51e7f613e00] received request Name# ImportFromS3 ok# false data# peer# 2025-07-08T12:0 ... PC_SERVER DEBUG: [0x51e5dc81e00] received request Name# TopicService/DropTopic ok# false data# peer# 2025-07-08T12:00:12.387548Z node 5 :GRPC_SERVER DEBUG: [0x51e5dc80c00] received request Name# Coordination/CreateNode ok# false data# peer# 2025-07-08T12:00:12.387576Z node 5 :GRPC_SERVER DEBUG: [0x51e5dc83c00] received request Name# Coordination/AlterNode ok# false data# peer# 2025-07-08T12:00:12.387602Z node 5 :GRPC_SERVER DEBUG: [0x51e5dc82400] received request Name# Coordination/DropNode ok# false data# peer# 2025-07-08T12:00:12.387631Z node 5 :GRPC_SERVER DEBUG: [0x51e7f630000] received request Name# Coordination/DescribeNode ok# false data# peer# 2025-07-08T12:00:12.387657Z node 5 :GRPC_SERVER DEBUG: [0x51e7f605400] received request Name# CreateDatabase ok# false data# peer# 2025-07-08T12:00:12.387685Z node 5 :GRPC_SERVER DEBUG: [0x51e7f627000] received request Name# GetDatabaseStatus ok# false data# peer# 2025-07-08T12:00:12.387712Z node 5 :GRPC_SERVER DEBUG: [0x51e5dc83600] received request Name# AlterDatabase ok# false data# peer# 2025-07-08T12:00:12.387740Z node 5 :GRPC_SERVER DEBUG: [0x51e7f620400] received request Name# ListDatabases ok# false data# peer# 2025-07-08T12:00:12.387767Z node 5 :GRPC_SERVER DEBUG: [0x51e7f625200] received request Name# RemoveDatabase ok# false data# peer# 2025-07-08T12:00:12.387793Z node 5 :GRPC_SERVER DEBUG: [0x51e7f624c00] received request Name# DescribeDatabaseOptions ok# false data# peer# 2025-07-08T12:00:12.387818Z node 5 :GRPC_SERVER DEBUG: [0x51e7f636000] received request Name# GetScaleRecommendation ok# false data# peer# 2025-07-08T12:00:12.387849Z node 5 :GRPC_SERVER DEBUG: [0x51e7f623400] received request Name# ListEndpoints ok# false data# peer# 2025-07-08T12:00:12.387877Z node 5 :GRPC_SERVER DEBUG: [0x51e7f62b200] received request Name# WhoAmI ok# false data# peer# 2025-07-08T12:00:12.387903Z node 5 :GRPC_SERVER DEBUG: [0x51e7f62c400] received request Name# NodeRegistration ok# false data# peer# 2025-07-08T12:00:12.387930Z node 5 :GRPC_SERVER DEBUG: [0x51e7f622e00] received request Name# Scan ok# false data# peer# 2025-07-08T12:00:12.387959Z node 5 :GRPC_SERVER DEBUG: [0x51e7f62be00] received request Name# GetShardLocations ok# false data# peer# 2025-07-08T12:00:12.387987Z node 5 :GRPC_SERVER DEBUG: [0x51e7f635400] 
received request Name# DescribeTable ok# false data# peer# 2025-07-08T12:00:12.388013Z node 5 :GRPC_SERVER DEBUG: [0x51e7f631e00] received request Name# CreateSnapshot ok# false data# peer# 2025-07-08T12:00:12.388039Z node 5 :GRPC_SERVER DEBUG: [0x51e7f619e00] received request Name# RefreshSnapshot ok# false data# peer# 2025-07-08T12:00:12.388068Z node 5 :GRPC_SERVER DEBUG: [0x51e7f61fe00] received request Name# DiscardSnapshot ok# false data# peer# 2025-07-08T12:00:12.388096Z node 5 :GRPC_SERVER DEBUG: [0x51e7f625800] received request Name# List ok# false data# peer# 2025-07-08T12:00:12.388123Z node 5 :GRPC_SERVER DEBUG: [0x51e7f638400] received request Name# RateLimiter/CreateResource ok# false data# peer# 2025-07-08T12:00:12.388153Z node 5 :GRPC_SERVER DEBUG: [0x51e7f63f600] received request Name# RateLimiter/DropResource ok# false data# peer# 2025-07-08T12:00:12.388166Z node 5 :GRPC_SERVER DEBUG: [0x51e7f637800] received request Name# RateLimiter/AlterResource ok# false data# peer# 2025-07-08T12:00:12.388178Z node 5 :GRPC_SERVER DEBUG: [0x51e7f62d000] received request Name# RateLimiter/ListResources ok# false data# peer# 2025-07-08T12:00:12.388206Z node 5 :GRPC_SERVER DEBUG: [0x51e7f628e00] received request Name# RateLimiter/DescribeResource ok# false data# peer# 2025-07-08T12:00:12.388236Z node 5 :GRPC_SERVER DEBUG: [0x51e7f601e00] received request Name# RateLimiter/AcquireResource ok# false data# peer# 2025-07-08T12:00:12.388237Z node 5 :GRPC_SERVER DEBUG: [0x51e7f62e800] received request Name# CreateStream ok# false data# peer# 2025-07-08T12:00:12.388270Z node 5 :GRPC_SERVER DEBUG: [0x51e7f62fa00] received request Name# ListStreams ok# false data# peer# 2025-07-08T12:00:12.388279Z node 5 :GRPC_SERVER DEBUG: [0x51e7f62f400] received request Name# DeleteStream ok# false data# peer# 2025-07-08T12:00:12.388313Z node 5 :GRPC_SERVER DEBUG: [0x51e7f631800] received request Name# DescribeStream ok# false data# peer# 2025-07-08T12:00:12.388327Z node 5 :GRPC_SERVER DEBUG: [0x51e7f63b400] received request Name# ListShards ok# false data# peer# 2025-07-08T12:00:12.388345Z node 5 :GRPC_SERVER DEBUG: [0x51e7f628200] received request Name# SetWriteQuota ok# false data# peer# 2025-07-08T12:00:12.388360Z node 5 :GRPC_SERVER DEBUG: [0x51e7f634800] received request Name# UpdateStream ok# false data# peer# 2025-07-08T12:00:12.388378Z node 5 :GRPC_SERVER DEBUG: [0x51e7f637200] received request Name# PutRecord ok# false data# peer# 2025-07-08T12:00:12.388392Z node 5 :GRPC_SERVER DEBUG: [0x51e7f639c00] received request Name# PutRecords ok# false data# peer# 2025-07-08T12:00:12.388410Z node 5 :GRPC_SERVER DEBUG: [0x51e7f63f000] received request Name# GetRecords ok# false data# peer# 2025-07-08T12:00:12.388424Z node 5 :GRPC_SERVER DEBUG: [0x51e7f631200] received request Name# GetShardIterator ok# false data# peer# 2025-07-08T12:00:12.388438Z node 5 :GRPC_SERVER DEBUG: [0x51e7f630600] received request Name# SubscribeToShard ok# false data# peer# 2025-07-08T12:00:12.388452Z node 5 :GRPC_SERVER DEBUG: [0x51e7f62ee00] received request Name# DescribeLimits ok# false data# peer# 2025-07-08T12:00:12.388467Z node 5 :GRPC_SERVER DEBUG: [0x51e7f63ba00] received request Name# DescribeStreamSummary ok# false data# peer# 2025-07-08T12:00:12.388481Z node 5 :GRPC_SERVER DEBUG: [0x51e7f623a00] received request Name# DecreaseStreamRetentionPeriod ok# false data# peer# 2025-07-08T12:00:12.388494Z node 5 :GRPC_SERVER DEBUG: [0x51e7f622800] received request Name# IncreaseStreamRetentionPeriod ok# false data# peer# 
2025-07-08T12:00:12.388511Z node 5 :GRPC_SERVER DEBUG: [0x51e7f622200] received request Name# UpdateShardCount ok# false data# peer# 2025-07-08T12:00:12.388523Z node 5 :GRPC_SERVER DEBUG: [0x51e7f61e600] received request Name# UpdateStreamMode ok# false data# peer# 2025-07-08T12:00:12.388551Z node 5 :GRPC_SERVER DEBUG: [0x51e7f616200] received request Name# RegisterStreamConsumer ok# false data# peer# 2025-07-08T12:00:12.388552Z node 5 :GRPC_SERVER DEBUG: [0x51e7f611400] received request Name# DeregisterStreamConsumer ok# false data# peer# 2025-07-08T12:00:12.388586Z node 5 :GRPC_SERVER DEBUG: [0x51e7f615c00] received request Name# DescribeStreamConsumer ok# false data# peer# 2025-07-08T12:00:12.388590Z node 5 :GRPC_SERVER DEBUG: [0x51e7f614400] received request Name# ListStreamConsumers ok# false data# peer# 2025-07-08T12:00:12.388614Z node 5 :GRPC_SERVER DEBUG: [0x51e7f610800] received request Name# AddTagsToStream ok# false data# peer# 2025-07-08T12:00:12.388625Z node 5 :GRPC_SERVER DEBUG: [0x51e7f616800] received request Name# DisableEnhancedMonitoring ok# false data# peer# 2025-07-08T12:00:12.388641Z node 5 :GRPC_SERVER DEBUG: [0x51e7f61f800] received request Name# EnableEnhancedMonitoring ok# false data# peer# 2025-07-08T12:00:12.388670Z node 5 :GRPC_SERVER DEBUG: [0x51e7f61e000] received request Name# MergeShards ok# false data# peer# 2025-07-08T12:00:12.388675Z node 5 :GRPC_SERVER DEBUG: [0x51e7f61ec00] received request Name# ListTagsForStream ok# false data# peer# 2025-07-08T12:00:12.388698Z node 5 :GRPC_SERVER DEBUG: [0x51e7f61da00] received request Name# RemoveTagsFromStream ok# false data# peer# 2025-07-08T12:00:12.388710Z node 5 :GRPC_SERVER DEBUG: [0x51e7f634200] received request Name# SplitShard ok# false data# peer# 2025-07-08T12:00:12.388725Z node 5 :GRPC_SERVER DEBUG: [0x51e7f634e00] received request Name# StartStreamEncryption ok# false data# peer# 2025-07-08T12:00:12.388742Z node 5 :GRPC_SERVER DEBUG: [0x51e7f633c00] received request Name# StopStreamEncryption ok# false data# peer# 2025-07-08T12:00:12.388753Z node 5 :GRPC_SERVER DEBUG: [0x51e7f627c00] received request Name# SelfCheck ok# false data# peer# 2025-07-08T12:00:12.388773Z node 5 :GRPC_SERVER DEBUG: [0x51e7f627600] received request Name# NodeCheck ok# false data# peer# 2025-07-08T12:00:12.388782Z node 5 :GRPC_SERVER DEBUG: [0x51e7f614a00] received request Name# CreateSession ok# false data# peer# 2025-07-08T12:00:12.388809Z node 5 :GRPC_SERVER DEBUG: [0x51e7f636600] received request Name# DeleteSession ok# false data# peer# 2025-07-08T12:00:12.388813Z node 5 :GRPC_SERVER DEBUG: [0x51e7f635a00] received request Name# AttachSession ok# false data# peer# 2025-07-08T12:00:12.388844Z node 5 :GRPC_SERVER DEBUG: [0x51e7f625e00] received request Name# BeginTransaction ok# false data# peer# 2025-07-08T12:00:12.388848Z node 5 :GRPC_SERVER DEBUG: [0x51e7f624600] received request Name# CommitTransaction ok# false data# peer# 2025-07-08T12:00:12.388874Z node 5 :GRPC_SERVER DEBUG: [0x51e7f62ca00] received request Name# RollbackTransaction ok# false data# peer# 2025-07-08T12:00:12.388883Z node 5 :GRPC_SERVER DEBUG: [0x51e7f613200] received request Name# ExecuteQuery ok# false data# peer# 2025-07-08T12:00:12.388902Z node 5 :GRPC_SERVER DEBUG: [0x51e7f612c00] received request Name# ExecuteScript ok# false data# peer# 2025-07-08T12:00:12.388919Z node 5 :GRPC_SERVER DEBUG: [0x51e7f616e00] received request Name# FetchScriptResults ok# false data# peer# 2025-07-08T12:00:12.388930Z node 5 :GRPC_SERVER DEBUG: [0x51e7f61a400] 
received request Name# ExecuteTabletMiniKQL ok# false data# peer# 2025-07-08T12:00:12.388965Z node 5 :GRPC_SERVER DEBUG: [0x51e7f619200] received request Name# ChangeTabletSchema ok# false data# peer# 2025-07-08T12:00:12.388977Z node 5 :GRPC_SERVER DEBUG: [0x51e5dc8ba00] received request Name# RestartTablet ok# false data# peer# 2025-07-08T12:00:12.388996Z node 5 :GRPC_SERVER DEBUG: [0x51e7f60a800] received request Name# CreateLogStore ok# false data# peer# 2025-07-08T12:00:12.389004Z node 5 :GRPC_SERVER DEBUG: [0x51e7f609600] received request Name# DescribeLogStore ok# false data# peer# 2025-07-08T12:00:12.389027Z node 5 :GRPC_SERVER DEBUG: [0x51e7f610200] received request Name# DropLogStore ok# false data# peer# 2025-07-08T12:00:12.389035Z node 5 :GRPC_SERVER DEBUG: [0x51e7f61aa00] received request Name# AlterLogStore ok# false data# peer# 2025-07-08T12:00:12.389056Z node 5 :GRPC_SERVER DEBUG: [0x51e7f618000] received request Name# CreateLogTable ok# false data# peer# 2025-07-08T12:00:12.389068Z node 5 :GRPC_SERVER DEBUG: [0x51e7f617a00] received request Name# DescribeLogTable ok# false data# peer# 2025-07-08T12:00:12.389087Z node 5 :GRPC_SERVER DEBUG: [0x51e7f60f000] received request Name# DropLogTable ok# false data# peer# 2025-07-08T12:00:12.389106Z node 5 :GRPC_SERVER DEBUG: [0x51e7f60cc00] received request Name# AlterLogTable ok# false data# peer# 2025-07-08T12:00:12.389123Z node 5 :GRPC_SERVER DEBUG: [0x51e7f60d200] received request Name# Login ok# false data# peer# 2025-07-08T12:00:12.389142Z node 5 :GRPC_SERVER DEBUG: [0x51e7f60b400] received request Name# DescribeReplication ok# false data# peer# 2025-07-08T12:00:12.389155Z node 5 :GRPC_SERVER DEBUG: [0x51e7f60a200] received request Name# DescribeView ok# false data# peer# >> KqpScanArrowFormat::AggregateEmptySum [GOOD] >> TSchemeShardTest::AlterBlockStoreVolume [GOOD] >> TSchemeShardTest::AlterBlockStoreVolumeWithNonReplicatedPartitions >> TSchemeShardTest::CopyTableAndConcurrentMerge [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentSplitMerge >> Viewer::JsonStorageListingV2 [GOOD] >> Viewer::JsonStorageListingV2GroupIdFilter >> KqpScanArrowFormat::AggregateWithFunction [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowFormat::AggregateEmptySum [GOOD] Test command err: Trying to start YDB, gRPC: 12406, MsgBus: 18145 2025-07-08T12:00:06.918669Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679649184099981:2158];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0014d6/r3tmp/tmpB58ahi/pdisk_1.dat 2025-07-08T12:00:06.960284Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:00:07.025363Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12406, node 1 2025-07-08T12:00:07.061448Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:07.061478Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:07.062562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:07.077175Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-07-08T12:00:07.077187Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:07.077190Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:07.077230Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18145 TClient is connected to server localhost:18145 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:07.201990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:07.205253Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:00:07.265900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:07.365632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:07.410193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:07.442175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:00:07.693560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:00:07.715980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:00:07.731830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:00:07.748555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:00:07.777756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:00:07.836365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:00:07.852136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:00:07.918508Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:00:08.269366Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976008132, txId: 281474976715670] shutting down Trying to start YDB, gRPC: 22096, MsgBus: 1123 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0014d6/r3tmp/tmpApxAqR/pdisk_1.dat 2025-07-08T12:00:08.627534Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:08.639800Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22096, node 2 2025-07-08T12:00:08.661560Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:08.661571Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:08.661574Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:08.661621Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1123 TClient is connected to server localhost:1123 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:08.725389Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:08.725417Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:08.726719Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:08.733529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:08.736707Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:00:08.772235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:08.830813Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:08.868714Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:08.889529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:00:09.057626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:00:09.075426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:00:09.092112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:00:09.125562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:00:09.139783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:00:09.156098Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:00:09.216789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:00:09.767321Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:00:09.802385Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976009553, txId: 281474976715670] shutting down Trying to start YDB, gRPC: 20755, MsgBus: 19779 2025-07-08T12:00:10.292895Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7524679667774634641:2062];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0014d6/r3tmp/tmpiuFSUB/pdisk_1.dat 2025-07-08T12:00:10.302442Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:00:10.327473Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20755, node 3 2025-07-08T12:00:10.357128Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:10.357143Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:10.357145Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:10.357192Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19779 2025-07-08T12:00:10.397094Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:10.397126Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:10.398911Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19779 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T12:00:10.418877Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:10.420098Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:00:10.429750Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:10.459499Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:10.482450Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:10.493329Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:00:10.716829Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:00:10.741824Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:00:10.765505Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:00:10.801978Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:00:10.829505Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:00:10.849912Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:00:10.877976Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:00:11.297253Z node 3 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:00:11.867082Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976011345, txId: 281474976715670] shutting down Trying to start YDB, gRPC: 24618, MsgBus: 64295 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0014d6/r3tmp/tmp03TmfJ/pdisk_1.dat 2025-07-08T12:00:12.071739Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:12.083859Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24618, node 4 2025-07-08T12:00:12.101427Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:12.101441Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:12.101443Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:12.101491Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64295 2025-07-08T12:00:12.149324Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:12.149355Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:12.153383Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64295 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:12.161886Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:12.163325Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:12.171391Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T12:00:12.190837Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:12.225654Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:12.242637Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:00:12.530212Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:00:12.542902Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:00:12.553810Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:00:12.566338Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:00:12.580991Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:00:12.594683Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:00:12.609069Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:00:13.045386Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976012927, txId: 281474976715670] shutting down 2025-07-08T12:00:13.048403Z node 4 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateSystemColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T12:00:02.312632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:00:02.312673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:02.312682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:00:02.312687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:00:02.312702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:00:02.312706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:02.312715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2025-07-08T12:00:02.312728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:00:02.312798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:02.346825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:00:02.346845Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:02.352785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:02.352837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:02.352866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:00:02.354073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:02.354136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:02.354219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:02.354363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:00:02.355005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:02.355052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:00:02.355286Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:02.355293Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:02.355310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:02.355317Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:02.355321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:00:02.355346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:00:02.356462Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T12:00:02.373892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:02.373975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:02.374049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:00:02.374099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:02.374108Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:02.374669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:02.374694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:00:02.374734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:02.374742Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:00:02.374747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:00:02.374752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:00:02.375047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:02.375055Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:02.375059Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:00:02.375314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:02.375321Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:02.375326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:02.375333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:00:02.375839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:02.376185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:00:02.376223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:00:02.376380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:02.376399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:02.376409Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:02.376473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:00:02.376479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:02.376507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:02.376517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:00:02.376841Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:02.376847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:02.376892Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:02.376897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:00:02.376906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:02.376911Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:00:02.376921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:02.376925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:02.376929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:02.376932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:02.376937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:00:02.376941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:02.376961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:00:02.376966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:00:02.376976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:02.376981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:00:02.376985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:00:02.377389Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:00:02.377408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
eadyToDone TxId: 102 ready parts: 1/1 2025-07-08T12:00:13.018464Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-07-08T12:00:13.018467Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-07-08T12:00:13.018472Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-07-08T12:00:13.018486Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [15:342:2321] message: TxId: 102 2025-07-08T12:00:13.018492Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-07-08T12:00:13.018497Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-07-08T12:00:13.018501Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-07-08T12:00:13.018530Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-07-08T12:00:13.018957Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-07-08T12:00:13.018968Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [15:343:2322] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2025-07-08T12:00:13.019814Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "SystemColumnInCopyAllowed" CopyFromTable: "/MyRoot/SystemColumnAllowed" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:13.019866Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /MyRoot/SystemColumnInCopyAllowed, opId: 103:0, at schemeshard: 72057594046678944 2025-07-08T12:00:13.019970Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: SystemColumnInCopyAllowed, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-07-08T12:00:13.019982Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-07-08T12:00:13.019986Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-07-08T12:00:13.019992Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-07-08T12:00:13.020004Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T12:00:13.020021Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:13.020151Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:13.020161Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-07-08T12:00:13.020574Z node 15 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944 PathId: 3, at schemeshard: 72057594046678944 2025-07-08T12:00:13.020628Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/SystemColumnInCopyAllowed 2025-07-08T12:00:13.020660Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:13.020664Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:13.020695Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T12:00:13.020709Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:13.020714Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:207:2209], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-07-08T12:00:13.020719Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:207:2209], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-07-08T12:00:13.020787Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-07-08T12:00:13.020794Z node 15 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 103:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046678944 2025-07-08T12:00:13.020832Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 103:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-07-08T12:00:13.021050Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-07-08T12:00:13.021062Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-07-08T12:00:13.021066Z node 15 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-07-08T12:00:13.021071Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-07-08T12:00:13.021076Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T12:00:13.025045Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 1 PathOwnerId: 72057594046678944, cookie: 103 2025-07-08T12:00:13.025064Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 3 Version: 1 PathOwnerId: 72057594046678944, cookie: 103 2025-07-08T12:00:13.025072Z node 15 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-07-08T12:00:13.025077Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 1 2025-07-08T12:00:13.025083Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-07-08T12:00:13.025101Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-07-08T12:00:13.025371Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:2 msg type: 268697601 2025-07-08T12:00:13.025405Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72057594037968897 2025-07-08T12:00:13.025412Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 103, shardIdx: 72057594046678944:2, partId: 0 2025-07-08T12:00:13.025608Z node 15 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-07-08T12:00:13.025653Z node 15 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 2, type DataShard, boot OK, tablet id 72075186233409547 2025-07-08T12:00:13.025724Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2025-07-08T12:00:13.025730Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 103, shardIdx: 72057594046678944:2, partId: 0 2025-07-08T12:00:13.025742Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2025-07-08T12:00:13.025748Z node 15 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 103:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2025-07-08T12:00:13.025754Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 103:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2025-07-08T12:00:13.025777Z node 15 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 2 -> 3 2025-07-08T12:00:13.025992Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-07-08T12:00:13.026393Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-07-08T12:00:13.034249Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-07-08T12:00:13.034302Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 
2025-07-08T12:00:13.034312Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCopyTable TConfigureParts operationId# 103:0 ProgressState at tablet# 72057594046678944 2025-07-08T12:00:13.034324Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCopyTable TConfigureParts operationId# 103:0 Propose modify scheme on dstDatashard# 72075186233409547 idx# 72057594046678944:2 srcDatashard# 72075186233409546 idx# 72057594046678944:1 operationId# 103:0 seqNo# 2:2 at tablet# 72057594046678944 2025-07-08T12:00:13.038828Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2025-07-08T12:00:13.038869Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269549568 2025-07-08T12:00:13.038886Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409547 2025-07-08T12:00:13.038891Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409546 TestModificationResult got TxId: 103, wait until txId: 103 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::AggregateEmptySum [GOOD] Test command err: Trying to start YDB, gRPC: 15980, MsgBus: 16182 2025-07-08T12:00:06.759747Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679651614364041:2234];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:06.761022Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0014db/r3tmp/tmp2OiWTM/pdisk_1.dat 2025-07-08T12:00:06.855569Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15980, node 1 2025-07-08T12:00:06.903410Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:06.903427Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:06.903430Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:06.903476Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:00:06.918830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:06.918868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:16182 2025-07-08T12:00:06.925358Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16182 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:07.010578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:07.021864Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:00:07.078212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:07.099083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:07.123441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:07.139104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:00:07.298704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:00:07.309233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:00:07.328061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:00:07.336657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:00:07.351903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:00:07.369027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:00:07.387369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:00:08.076113Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:00:08.252443Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976007740, txId: 281474976715670] shutting down Trying to start YDB, gRPC: 31942, MsgBus: 4512 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0014db/r3tmp/tmphdw17z/pdisk_1.dat 2025-07-08T12:00:08.495835Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:08.511046Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31942, node 2 2025-07-08T12:00:08.548796Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:08.548812Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:08.548814Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:08.548858Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4512 2025-07-08T12:00:08.593941Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:08.593969Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:08.594921Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4512 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:08.709959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:08.711844Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:00:08.722221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:08.749951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:00:08.806043Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T12:00:08.893815Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:00:09.088485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:00:09.104462Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:00:09.121393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:00:09.140134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:00:09.157279Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:00:09.180430Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:00:09.201865Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:00:09.467153Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:00:10.067375Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976009658, txId: 281474976715670] shutting down Trying to start YDB, gRPC: 18599, MsgBus: 12287 2025-07-08T12:00:10.479300Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7524679669411974949:2126];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:10.479451Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0014db/r3tmp/tmpGPZl9l/pdisk_1.dat 2025-07-08T12:00:10.502649Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18599, node 3 2025-07-08T12:00:10.521212Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:10.521225Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:10.521228Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:10.521275Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12287 TClient is connected to server localhost:12287 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:10.577143Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:10.577175Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:10.578140Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:10.597578Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:10.601510Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:00:10.605193Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:00:10.625113Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T12:00:10.690139Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:10.751463Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:00:10.863262Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:00:10.875696Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:00:10.889202Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:00:10.914257Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:00:10.938027Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:00:10.996397Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:00:11.008933Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:00:11.387663Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976011282, txId: 281474976715670] shutting down 2025-07-08T12:00:11.477158Z node 3 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Trying to start YDB, gRPC: 7740, MsgBus: 24779 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0014db/r3tmp/tmp3HnS9c/pdisk_1.dat 2025-07-08T12:00:11.817057Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:11.829208Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7740, node 4 2025-07-08T12:00:11.881559Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:11.881574Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:11.881575Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:11.881624Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:00:11.893416Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:11.893454Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:11.897387Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24779 TClient is connected to server localhost:24779 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:12.061908Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:12.070853Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:00:12.089112Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:12.127374Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:12.203377Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:12.227925Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:00:12.705259Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:00:12.726888Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:00:12.747814Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:00:12.762274Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:00:12.785460Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:00:12.797810Z node 4 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:00:12.810351Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:00:12.831735Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:00:13.220030Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976013151, txId: 281474976715670] shutting down >> TS3WrapperTests::GetUnknownObject >> LocalPartition::WithoutPartitionDeadNode [GOOD] >> TSchemeShardTest::AlterBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::AdoptDropSolomon >> LocalPartition::WithoutPartitionPartitionRelocation >> TS3WrapperTests::PutObject ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::AggregateByColumn [GOOD] Test command err: Trying to start YDB, gRPC: 6738, MsgBus: 24468 2025-07-08T12:00:08.205651Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679660497389408:2058];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:08.205693Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00148d/r3tmp/tmpoFCbjl/pdisk_1.dat 2025-07-08T12:00:08.293048Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6738, node 1 2025-07-08T12:00:08.300775Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:08.300795Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:08.300798Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:08.300838Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24468 TClient is connected 
to server localhost:24468 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-07-08T12:00:08.342423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:08.342448Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:08.343080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:08.354733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:08.357021Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:00:08.365023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:00:08.441173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:08.482028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T12:00:08.499157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:00:08.657177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:00:08.673727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:00:08.692737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:00:08.704291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:00:08.721515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:00:08.735025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:00:08.748376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:00:09.093695Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7524679664792359261:2446] TxId: 281474976715671. Ctx: { TraceId: 01jzmygqb51ygw5gw5evz9zx2b, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzUxNDAwOTQtOGJmZDc3YmItOGFkMWIxZjEtOTAxMzQzNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Can not find default state storage group for database /Root 2025-07-08T12:00:09.095658Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976009140, txId: 281474976715670] shutting down Trying to start YDB, gRPC: 64639, MsgBus: 21546 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00148d/r3tmp/tmpiNzpF9/pdisk_1.dat 2025-07-08T12:00:09.422605Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679664747250662:2089];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:09.424214Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:00:09.461270Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64639, node 2 2025-07-08T12:00:09.493260Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:09.493270Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:09.493273Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:09.493317Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21546 2025-07-08T12:00:09.517343Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:09.517370Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:09.523986Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21546 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:09.605489Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:09.612202Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:00:09.617235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:00:09.652204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:09.689812Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:09.726258Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:09.905557Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:00:09.920574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:00:09.934816Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:00:09.947713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:00:09.958424Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:00:09.967928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:00:09.982729Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:00:10.162082Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: disc ... 
d: 281474976715670] shutting down 2025-07-08T12:00:10.244185Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976010281, txId: 281474976715672] shutting down 2025-07-08T12:00:10.417248Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Trying to start YDB, gRPC: 30457, MsgBus: 3632 2025-07-08T12:00:10.803533Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7524679668261541805:2147];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:10.806318Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00148d/r3tmp/tmp15f8zc/pdisk_1.dat 2025-07-08T12:00:10.844185Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30457, node 3 2025-07-08T12:00:10.881473Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:10.881482Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:10.881484Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:10.881527Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3632 2025-07-08T12:00:10.925357Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:10.925384Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:10.929336Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3632 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:10.993442Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:00:10.995135Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:00:11.009594Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:00:11.050036Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T12:00:11.100219Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:11.124934Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:11.169053Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:00:11.181544Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:00:11.194821Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:00:11.211768Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:00:11.227532Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:00:11.238227Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:00:11.255025Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:00:11.718333Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976011562, txId: 281474976715670] shutting down Trying to start YDB, gRPC: 4989, MsgBus: 11127 2025-07-08T12:00:11.982389Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00148d/r3tmp/tmp2Is5CV/pdisk_1.dat 2025-07-08T12:00:12.013304Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:12.015013Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:12.015038Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2025-07-08T12:00:12.015475Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4989, node 4 2025-07-08T12:00:12.033528Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:12.033541Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:12.033543Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:12.033586Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11127 TClient is connected to server localhost:11127 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T12:00:12.103474Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:12.104943Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:00:12.129570Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:12.186246Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:00:12.257926Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T12:00:12.281996Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:00:12.365462Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:00:12.388256Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:00:12.401366Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:00:12.462252Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:00:12.479772Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:00:12.540743Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:00:12.554516Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:00:13.093199Z node 4 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:00:13.121889Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976012864, txId: 281474976715670] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T12:00:01.164737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:00:01.164779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:01.164785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:00:01.164789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:00:01.164802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:00:01.164806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:01.164813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:01.164826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:00:01.164893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:01.196553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:00:01.196577Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:01.215858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:01.215930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:01.215960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:00:01.234412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:01.234486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:01.234569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:01.234750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:00:01.235497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:01.235543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:00:01.235792Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:01.235799Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:01.235814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:01.235821Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:01.235827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:00:01.235850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:00:01.236969Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T12:00:01.274979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:01.275066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:01.275130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:00:01.275178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 
2025-07-08T12:00:01.275188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:01.281320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:01.281360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:00:01.281413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:01.281424Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:00:01.281429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:00:01.281435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:00:01.281981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:01.281992Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:01.281997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:00:01.282293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:01.282303Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:01.282308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:01.282315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:00:01.282992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:01.283423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:00:01.283460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:00:01.283628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:01.283650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:01.283665Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose 
HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:01.283727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:00:01.283734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:01.283765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:01.283776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:00:01.284272Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:01.284281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:01.284326Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:01.284331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:00:01.284341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:01.284347Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:00:01.284358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:01.284362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:01.284367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:01.284370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:01.284374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:00:01.284379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:01.284383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:00:01.284387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:00:01.284397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:01.284404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:00:01.284408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:00:01.284833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:00:01.284849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
e: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 32 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 31 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:13.246121Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:13.246137Z node 16 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA" took 18us result status StatusSuccess 2025-07-08T12:00:13.246177Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA" PathDescription { Self { Name: "DirA" PathId: 29 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 28 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 } ChildrenExist: true } Children { Name: "DirB" PathId: 30 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 29 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "Table2" PathId: 32 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 29 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 32 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 29 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:13.246243Z node 16 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/Table2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:13.246262Z node 16 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/Table2" took 21us result status StatusSuccess 2025-07-08T12:00:13.246321Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/Table2" PathDescription { Self { Name: "Table2" PathId: 32 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 29 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 32 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 32 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:13.246397Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2025-07-08T12:00:13.246412Z node 16 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB" took 17us result status StatusSuccess 2025-07-08T12:00:13.246447Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB" PathDescription { Self { Name: "DirB" PathId: 30 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 29 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "Table3" PathId: 33 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 30 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 32 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 30 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:13.246516Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB/Table3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:13.246535Z node 16 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB/Table3" took 20us result status StatusSuccess 2025-07-08T12:00:13.246588Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB/Table3" PathDescription { Self { Name: "Table3" PathId: 33 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 30 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY 
ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 32 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 33 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::DisablePublicationsOfDropping_Solomon [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T12:00:02.252837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:00:02.252874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:02.252879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:00:02.252884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:00:02.252898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:00:02.252902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:02.252912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:02.252924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:00:02.253017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:02.267114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:00:02.267134Z node 1 :IMPORT WARN: Table 
profiles were not loaded 2025-07-08T12:00:02.271348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:02.271419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:02.271456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:00:02.273191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:02.273267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:02.273357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:02.273560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:00:02.274508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:02.274564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:00:02.274807Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:02.274818Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:02.274836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:02.274844Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:02.274849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:00:02.274877Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:00:02.276311Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T12:00:02.295724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:02.295810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:02.295875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:00:02.295923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:02.295933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:02.296699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:02.296727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, 
path: //MyRoot 2025-07-08T12:00:02.296772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:02.296782Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:00:02.296787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:00:02.296792Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:00:02.297251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:02.297266Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:02.297271Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:00:02.297642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:02.297653Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:02.297659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:02.297665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:00:02.298283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:02.298770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:00:02.298814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:00:02.298986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:02.299010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:02.299021Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:02.299091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:00:02.299098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:02.299128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:02.299140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:00:02.299595Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:02.299605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:02.299653Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:02.299659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:00:02.299669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:02.299675Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:00:02.299687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:02.299691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:02.299695Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:02.299698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:02.299702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:00:02.299707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:02.299711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:00:02.299715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:00:02.299726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:02.299732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:00:02.299736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:00:02.300162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:00:02.300180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
5-07-08T12:00:12.730371Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-07-08T12:00:12.730376Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:12.730448Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-07-08T12:00:12.730458Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-07-08T12:00:12.730461Z node 15 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-07-08T12:00:12.730465Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-07-08T12:00:12.730469Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-07-08T12:00:12.730481Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-07-08T12:00:12.732564Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-07-08T12:00:12.732603Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2025-07-08T12:00:12.734270Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:12.734303Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 64424511589 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:12.734314Z node 15 :FLAT_TX_SCHEMESHARD INFO: TDropSolomon TPropose operationId# 104:0 HandleReply TEvOperationPlan, step: 5000005, at schemeshard: 72057594046678944 2025-07-08T12:00:12.734327Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 3] name: Obj type: EPathTypeSolomonVolume state: EPathStateDrop stepDropped: 0 droppedTxId: 104 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:12.734332Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T12:00:12.734372Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-07-08T12:00:12.734395Z node 15 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 130 2025-07-08T12:00:12.734428Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:12.734440Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T12:00:12.734726Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-07-08T12:00:12.735779Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 FAKE_COORDINATOR: Erasing txId 104 2025-07-08T12:00:12.736492Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:12.736501Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:12.736552Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T12:00:12.736581Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:12.736586Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:450:2410], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-07-08T12:00:12.736591Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:450:2410], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-07-08T12:00:12.736602Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-07-08T12:00:12.736610Z node 15 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 104:0 ProgressState 2025-07-08T12:00:12.736625Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-07-08T12:00:12.736629Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-07-08T12:00:12.736634Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-07-08T12:00:12.736637Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-07-08T12:00:12.736643Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-07-08T12:00:12.736647Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-07-08T12:00:12.736652Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-07-08T12:00:12.736656Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-07-08T12:00:12.736693Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-07-08T12:00:12.736700Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-07-08T12:00:12.736704Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-07-08T12:00:12.736708Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-07-08T12:00:12.736903Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 
LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-07-08T12:00:12.736917Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-07-08T12:00:12.736922Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-07-08T12:00:12.736928Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-07-08T12:00:12.736933Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T12:00:12.737088Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-07-08T12:00:12.737100Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-07-08T12:00:12.737104Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-07-08T12:00:12.737107Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-07-08T12:00:12.737112Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:12.737123Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-07-08T12:00:12.741901Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-07-08T12:00:12.742742Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409547 2025-07-08T12:00:12.743070Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-07-08T12:00:12.743154Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-07-08T12:00:12.743247Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T12:00:12.743255Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-07-08T12:00:12.743269Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:12.743472Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-07-08T12:00:12.743494Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-07-08T12:00:12.744444Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-07-08T12:00:12.744461Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-07-08T12:00:12.746791Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-07-08T12:00:12.746880Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-07-08T12:00:12.746888Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-07-08T12:00:12.746969Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-07-08T12:00:12.746990Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-07-08T12:00:12.746996Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [15:576:2518] TestWaitNotification: OK eventTxId 104 >> TS3WrapperTests::GetUnknownObject [GOOD] >> TSchemeShardTest::AdoptDropSolomon [GOOD] >> TSchemeShardTest::AlterTableAndAfterSplit >> Viewer::ServerlessWithExclusiveNodesCheckTable [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowFormat::AggregateWithFunction [GOOD] Test command err: Trying to start YDB, gRPC: 63747, MsgBus: 1893 2025-07-08T12:00:07.832661Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679656169124721:2139];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:07.832671Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00148e/r3tmp/tmp9x1t2I/pdisk_1.dat 2025-07-08T12:00:07.936681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:07.936711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:07.943867Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:07.945639Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63747, node 1 2025-07-08T12:00:07.997425Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:07.997437Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:07.997439Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:07.997478Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1893 TClient is connected to server localhost:1893 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:08.150923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:08.170511Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:00:08.177718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:08.229823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:08.276074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-07-08T12:00:08.302285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:00:08.569690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:00:08.592746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:00:08.616749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:00:08.629141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:00:08.650000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:00:08.670470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:00:08.688429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:00:08.833180Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:00:08.911299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:00:09.057930Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7524679664759061882:2468] TxId: 281474976715675. Ctx: { TraceId: 01jzmygq9nd2h8y1djg9v0ckht, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzA2ODUzM2YtYTYzOGJiOGUtZjc5NzY0NGQtN2ExZTY2ZDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Can not find default state storage group for database /Root 2025-07-08T12:00:09.061030Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976009098, txId: 281474976715674] shutting down 864000000000 Trying to start YDB, gRPC: 12872, MsgBus: 20358 2025-07-08T12:00:09.557097Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679663764235450:2241];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:09.559555Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00148e/r3tmp/tmpiuVNK8/pdisk_1.dat 2025-07-08T12:00:09.593475Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12872, node 2 2025-07-08T12:00:09.609856Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:09.609870Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:09.609872Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:09.609918Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20358 2025-07-08T12:00:09.665329Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:09.665359Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:09.666509Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20358 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:09.721374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:09.736053Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:00:09.782085Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:00:09.805986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:09.842203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:09.862288Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:09.974318Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:00:09.984045Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:00:09.997204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:00:10.011603Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:00:10.024655Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:00:10.039820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESc ... 
7-08T12:00:10.235509Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:00:10.326449Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976010365, txId: 281474976715674] shutting down 864000000000 Trying to start YDB, gRPC: 30151, MsgBus: 13500 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00148e/r3tmp/tmpAl4hU8/pdisk_1.dat 2025-07-08T12:00:11.015479Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:11.033202Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30151, node 3 2025-07-08T12:00:11.045494Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:11.045509Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:11.045512Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:11.045558Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13500 2025-07-08T12:00:11.109512Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:11.109536Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:11.117283Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13500 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:11.157447Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:11.159326Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:00:11.173604Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:00:11.193624Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:11.225194Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:11.284658Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:11.549375Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:00:11.567395Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:00:11.597941Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:00:11.626007Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:00:11.651494Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:00:11.670903Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:00:11.708799Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:00:11.975485Z node 3 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:00:12.608125Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976012136, txId: 281474976715670] shutting down Trying to start YDB, gRPC: 22267, MsgBus: 20095 2025-07-08T12:00:13.001122Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00148e/r3tmp/tmp5A1Dg6/pdisk_1.dat 2025-07-08T12:00:13.016450Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22267, node 4 2025-07-08T12:00:13.031334Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:13.031349Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:13.031354Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:13.031405Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 
TClient is connected to server localhost:20095 2025-07-08T12:00:13.101139Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:13.101168Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:13.101877Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20095 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T12:00:13.165364Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:13.168149Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:00:13.253956Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:13.290007Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:13.336423Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:13.366121Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:00:13.433660Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:00:13.445108Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:00:13.457348Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:00:13.469129Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:00:13.481622Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:00:13.497485Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:00:13.511537Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:00:13.876413Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976013781, txId: 281474976715670] shutting down 2025-07-08T12:00:13.998144Z node 4 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> DataShardVolatile::DistributedOutOfOrderFollowerConsistency [GOOD] >> DataShardVolatile::DistributedWriteRSNotAckedBeforeCommit >> TS3WrapperTests::PutObject [GOOD] |66.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest |66.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TSchemeShardTest::CopyTableAndConcurrentSplitMerge [GOOD] >> TSchemeShardTest::ConsistentCopyTablesForBackup >> TS3WrapperTests::AbortMultipartUpload ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetUnknownObject [GOOD] Test command err: 2025-07-08T12:00:14.363301Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 45F4AB2E-7E5C-4497-BB71-32AC92DDF7D0, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:28884 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: DC051178-25AE-4402-9515-ABC5A9875661 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 2025-07-08T12:00:14.364910Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 45F4AB2E-7E5C-4497-BB71-32AC92DDF7D0, response# No response body. 
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::ServerlessWithExclusiveNodesCheckTable [GOOD] Test command err: 2025-07-08T12:00:00.628216Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679626831300405:2239];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-07-08T12:00:00.677157Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:00:00.725158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:00.725184Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:00.727053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26209, node 1 2025-07-08T12:00:00.739352Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:00.745144Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:00.745154Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:00.745156Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:00.745201Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14611 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:00.808561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:00.813349Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:00:00.842634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:00:00.846703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T12:00:00.849239Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-07-08T12:00:01.966900Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679628630057429:2228];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:01.966995Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-07-08T12:00:02.013442Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15197, node 2 2025-07-08T12:00:02.029620Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:02.029642Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:02.029644Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:02.029685Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:00:02.074339Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:02.074373Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:02.075336Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5563 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:02.106345Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:00:02.113198Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:00:02.123427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T12:00:02.129433Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:02.133893Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-07-08T12:00:03.105092Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:03.116401Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 8347, node 3 2025-07-08T12:00:03.140693Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:03.140706Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:03.140708Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:03.140762Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:00:03.186460Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:03.186497Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:03.201374Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32298 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:03.223107Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:03.225507Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:00:03.237449Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T12:00:03.238603Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T12:00:03.239876Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-07-08T12:00:04.221641Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:04.253570Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22402, node 4 2025-07-08T12:00:04.289055Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:04.289069Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:04.289071Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:04.289131Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5312 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:04.313689Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:04.313723Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:04.314078Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:00:04.321568Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:04.329070Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:00:04.342395Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T12:00:04.343639Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:04.345372Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-07-08T12:00:05.201034Z node 4 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:00:06.995488Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-07-08T12:00:07.074669Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:07.213706Z node 5 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-07-08T12:00:07.225191Z node 5 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-07-08T12:00:07.275228Z node 5 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 15166, node 5 TClient is connected to server localhost:14457 2025-07-08T12:00:07.344791Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:07.344810Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:07.344814Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:07.344933Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration json result: {"Version":16,"TotalNodes":"1","FoundNodes":"1","FieldsAvailable":"0000000010000110111111100100111","FieldsRequired":"0000000000000000000000000100101","Problems":["no-database-board-info"],"Nodes":[{"NodeId":6,"Disconnected":true,"SystemState":{"StartTime":"0","ChangeTime":"1","LoadAverage":[200.5712890625,124.263671875,54.6826171875],"NumberOfCpus":64,"SystemState":"Green","Host":"ghrun-3z2hjo4icm.auto.internal","Version":".f7e5ffb","Location":{"DataCenter":"2","Module":"2","Rack":"2","Unit":"2"},"CoresUsed":0,"CoresTotal":0,"RealNumberOfCpus":64}}]} 2025-07-08T12:00:08.882459Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-07-08T12:00:09.076481Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:09.214321Z node 7 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 
2025-07-08T12:00:09.245193Z node 7 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-07-08T12:00:09.309617Z node 7 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 65374, node 7 TClient is connected to server localhost:61306 2025-07-08T12:00:09.403064Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:09.403082Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:09.403087Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:09.410313Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration json result: {"Version":16,"TotalNodes":"1","FoundNodes":"1","FieldsAvailable":"0000000010000110111111100000111","FieldsRequired":"0000000000000000000000000000101","Nodes":[{"NodeId":9,"Disconnected":true,"SystemState":{"StartTime":"0","ChangeTime":"1","LoadAverage":[201.48681640625,125.720703125,55.52783203125],"NumberOfCpus":64,"SystemState":"Green","Host":"ghrun-3z2hjo4icm.auto.internal","Version":".f7e5ffb","Location":{"DataCenter":"3","Module":"3","Rack":"3","Unit":"3"},"CoresUsed":0,"CoresTotal":0,"RealNumberOfCpus":64}}]} 2025-07-08T12:00:11.044745Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-07-08T12:00:11.182504Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:11.320762Z node 10 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-07-08T12:00:11.343687Z node 10 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-07-08T12:00:11.421378Z node 10 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 5322, node 10 TClient is connected to server localhost:22070 2025-07-08T12:00:11.491797Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:11.491820Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:11.491825Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:11.491954Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration json result: {"Version":16,"TotalNodes":"1","FoundNodes":"1","FieldsAvailable":"0000000010000110111111100000111","FieldsRequired":"0000000000000000000000000000101","Nodes":[{"NodeId":11,"Disconnected":true,"SystemState":{"StartTime":"0","ChangeTime":"1","LoadAverage":[201.48681640625,125.720703125,55.52783203125],"NumberOfCpus":64,"SystemState":"Green","Host":"ghrun-3z2hjo4icm.auto.internal","Version":".f7e5ffb","Location":{"DataCenter":"2","Module":"2","Rack":"2","Unit":"2"},"CoresUsed":0,"CoresTotal":0,"RealNumberOfCpus":64}}]} 2025-07-08T12:00:13.205563Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 
2025-07-08T12:00:13.373484Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:13.510151Z node 13 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-07-08T12:00:13.515406Z node 13 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-07-08T12:00:13.576984Z node 13 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 4280, node 13 TClient is connected to server localhost:62945 2025-07-08T12:00:13.627711Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:13.627732Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:13.627736Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:13.627885Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration json result: {"Version":16,"TotalNodes":"2","FoundNodes":"2","FieldsAvailable":"0000000010100110111111100100111","FieldsRequired":"0000000000000000000000000100101","Nodes":[{"NodeId":15,"Disconnected":true,"SystemState":{"StartTime":"0","ChangeTime":"1","LoadAverage":[203.6904296875,127.435546875,56.4599609375],"NumberOfCpus":64,"SystemState":"Green","Host":"ghrun-3z2hjo4icm.auto.internal","Version":".f7e5ffb","Location":{"DataCenter":"3","Module":"3","Rack":"3","Unit":"3"},"CoresUsed":0,"CoresTotal":0,"RealNumberOfCpus":64}},{"NodeId":16,"Disconnected":true,"SystemState":{"StartTime":"0","ChangeTime":"1","LoadAverage":[203.6904296875,127.435546875,56.4599609375],"NumberOfCpus":64,"SystemState":"Green","Host":"ghrun-3z2hjo4icm.auto.internal","Version":".f7e5ffb","Location":{"DataCenter":"4","Module":"4","Rack":"4","Unit":"4"},"CoresUsed":0,"CoresTotal":0,"RealNumberOfCpus":64},"Tablets":[{"Type":"DataShard","State":"Green","Count":1}]}]} >> TExternalTableTest::DropTableTwice >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists >> TS3WrapperTests::AbortMultipartUpload [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::PutObject [GOOD] Test command err: 2025-07-08T12:00:14.552904Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 052DD4B0-4FB6-45F5-AB9B-F29284B1C7CB, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:61265 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2EAF44C2-BA3B-40D5-9AB7-2D5ADD0712CD amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2025-07-08T12:00:14.569377Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 052DD4B0-4FB6-45F5-AB9B-F29284B1C7CB, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } >> TSchemeShardTest::ConsistentCopyTablesForBackup [GOOD] >> TSchemeShardTest::CopyLockedTableForBackup >> TSchemeShardTest::AlterTableAndAfterSplit [GOOD] >> TSchemeShardTest::AlterIndexTableDirectly >> TExternalTableTest::CreateExternalTable >> TExternalTableTest::ParallelReplaceExternalTableIfNotExists >> 
TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] >> TExternalTableTest::DropTableTwice [GOOD] >> TExternalTableTest::ParallelCreateExternalTable >> TColumnShardTestReadWrite::ReadGroupBy [GOOD] >> AutoConfig::GetASPoolsWith3CPUs [GOOD] >> TExternalTableTest::CreateExternalTable [GOOD] >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortMultipartUpload [GOOD] Test command err: 2025-07-08T12:00:15.292316Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 3A424546-E549-45B3-A1D7-ACEF9A36C8E3, request# CreateMultipartUpload { Bucket: TEST Key: key } REQUEST: POST /TEST/key?uploads HTTP/1.1 HEADERS: Host: localhost:27319 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 25E70E40-5AAE-4FB6-8310-F6100A4FDE54 amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key / uploads= 2025-07-08T12:00:15.305607Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 3A424546-E549-45B3-A1D7-ACEF9A36C8E3, response# CreateMultipartUploadResult { Bucket: Key: TEST/key UploadId: 1 } 2025-07-08T12:00:15.309067Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 50465E3F-69C2-4062-B15B-E2E93387CB62, request# AbortMultipartUpload { Bucket: TEST Key: key UploadId: 1 } REQUEST: DELETE /TEST/key?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:27319 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B630C125-3DC1-4268-96CB-FFD6B1BD5F85 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 6 / /TEST/key / uploadId=1 2025-07-08T12:00:15.310017Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 50465E3F-69C2-4062-B15B-E2E93387CB62, response# AbortMultipartUploadResult { } 2025-07-08T12:00:15.310172Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 36B2EE76-6342-4796-B1C4-8B118D39968D, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:27319 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B7C23321-23EC-4876-81FD-805AD3ED0B02 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 2025-07-08T12:00:15.310731Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 36B2EE76-6342-4796-B1C4-8B118D39968D, response# No response body. 
|66.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> AutoConfig::GetASPoolsith1CPU [GOOD] >> TExternalTableTest::ParallelCreateExternalTable [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:128:2154] sender: [1:129:2058] recipient: [1:110:2142] 2025-07-08T12:00:15.804943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:00:15.804980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:15.804985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:00:15.804989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:00:15.805000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:00:15.805003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:15.805010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:15.805025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:00:15.805095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:15.813829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T12:00:15.813847Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:15.816748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:15.816777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:15.816794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:00:15.819742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:15.819838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:15.819929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:15.820007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:00:15.820679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:15.820713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
[RootDataErasureManager] Stop 2025-07-08T12:00:15.820888Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:15.820894Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:15.820907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:15.820912Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:15.820916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:00:15.820940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:00:15.822111Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2154] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T12:00:15.837008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:15.837085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:15.837156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:00:15.837205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:15.837216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:15.842856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:15.842885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:00:15.842932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:15.842950Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:00:15.842954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:00:15.842957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:00:15.843349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:15.843358Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:15.843361Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:00:15.843581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:15.843588Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:15.843592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:15.843597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:00:15.844007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:15.844282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:00:15.844322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:00:15.844458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:15.844475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:15.844480Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:15.844536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:00:15.844540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:15.844563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:15.844573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:00:15.844859Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:15.844864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:15.844898Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:15.844902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:00:15.844934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:15.844938Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 
ProgressState 2025-07-08T12:00:15.844960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:15.844965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:15.844969Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:15.844973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:15.844977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:00:15.844982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:15.844987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:00:15.844989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:00:15.844997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:15.845001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:00:15.845003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:00:15.845344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:00:15.845356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Vers ... 
rd: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-07-08T12:00:15.852756Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:15.852761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:15.852782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T12:00:15.852793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T12:00:15.852800Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:15.852803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-07-08T12:00:15.852805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-07-08T12:00:15.852808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-07-08T12:00:15.852846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-07-08T12:00:15.852850Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-07-08T12:00:15.852857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-07-08T12:00:15.852861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-07-08T12:00:15.852868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-07-08T12:00:15.852870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-07-08T12:00:15.852874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-07-08T12:00:15.852878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-07-08T12:00:15.852883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-07-08T12:00:15.852886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-07-08T12:00:15.852895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-07-08T12:00:15.852899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-07-08T12:00:15.852901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-07-08T12:00:15.852903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-07-08T12:00:15.853008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T12:00:15.853015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T12:00:15.853018Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-07-08T12:00:15.853020Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-07-08T12:00:15.853023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T12:00:15.853165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T12:00:15.853172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T12:00:15.853174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-07-08T12:00:15.853176Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-07-08T12:00:15.853178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T12:00:15.853184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-07-08T12:00:15.853449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-07-08T12:00:15.853577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-07-08T12:00:15.853606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-07-08T12:00:15.853610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-07-08T12:00:15.853648Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-07-08T12:00:15.853658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-07-08T12:00:15.853661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:327:2318] TestWaitNotification: OK eventTxId 102 2025-07-08T12:00:15.853714Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:15.853731Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 22us result status StatusSuccess 2025-07-08T12:00:15.853774Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess 
Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-07-08T12:00:15.854241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "UniqueName" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:15.854270Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 103:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "UniqueName" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } 2025-07-08T12:00:15.854279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TAlterExternalTable Propose: opId# 103:0, path# /MyRoot/UniqueName, ReplaceIfExists: 1 2025-07-08T12:00:15.854298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp:133, at schemeshard: 72057594046678944 2025-07-08T12:00:15.854609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp:133" TxId: 103 SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 
72057594046678944 2025-07-08T12:00:15.854628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, source_location: ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp:133, operation: CREATE EXTERNAL TABLE, path: /MyRoot/UniqueName TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-07-08T12:00:15.854660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-07-08T12:00:15.854664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-07-08T12:00:15.854698Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-07-08T12:00:15.854707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-07-08T12:00:15.854709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:335:2326] TestWaitNotification: OK eventTxId 103 |66.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |66.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest >> TExternalTableTest::ParallelReplaceExternalTableIfNotExists [GOOD] >> TSchemeShardTest::CopyLockedTableForBackup [GOOD] >> TSchemeShardTest::ConfigColumnFamily >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists [GOOD] |66.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeAll [GOOD] >> TBalanceCoverageBuilderTest::TestSplitWithMergeBack [GOOD] >> TBalanceCoverageBuilderTest::TestOneSplit [GOOD] >> TBalanceCoverageBuilderTest::TestZeroTracks [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateExternalTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:128:2154] sender: [1:129:2058] recipient: [1:110:2142] 2025-07-08T12:00:15.796070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:00:15.796138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:15.796143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:00:15.796148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:00:15.796162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:00:15.796166Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:15.796174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:15.796190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:00:15.796278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:15.807091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T12:00:15.807114Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:15.812413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:15.812449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:15.812478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:00:15.814267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:15.814371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:15.814480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:15.814572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:00:15.815346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:15.815386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:00:15.815609Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:15.815619Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:15.815636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:15.815643Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:15.815648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:00:15.815681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:00:15.817105Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2154] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T12:00:15.837293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:15.837369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:15.837428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target 
path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:00:15.837490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:15.837500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:15.838238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:15.838267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:00:15.838322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:15.838342Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:00:15.838347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:00:15.838352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:00:15.838735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:15.838746Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:15.838751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:00:15.839083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:15.839094Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:15.839099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:15.839106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:00:15.839657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:15.840043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:00:15.840081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:00:15.840245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:15.840266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 
72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:15.840275Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:15.840350Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:00:15.840356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:15.840396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:15.840407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:00:15.840787Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:15.840795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:15.840832Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:15.840838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:00:15.840875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:15.840881Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:00:15.840891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:15.840895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:15.840900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:15.840903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:15.840907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:00:15.840912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:15.840915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:00:15.840919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:00:15.840929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:15.840935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:00:15.840939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:00:15.841326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:00:15.841343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, 
at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Vers ... 125: satisfy waiter [2:374:2365] 2025-07-08T12:00:16.194465Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2025-07-08T12:00:16.194484Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-07-08T12:00:16.194487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [2:374:2365] 2025-07-08T12:00:16.194493Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2025-07-08T12:00:16.194509Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2025-07-08T12:00:16.194513Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [2:374:2365] TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 2025-07-08T12:00:16.194582Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:16.194611Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable1" took 40us result status StatusSuccess 2025-07-08T12:00:16.194681Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable1" PathDescription { Self { Name: "ExternalTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable1" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Content: "" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:16.194765Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2025-07-08T12:00:16.194778Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable2" took 15us result status StatusSuccess 2025-07-08T12:00:16.194828Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable2" PathDescription { Self { Name: "ExternalTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable2" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false } Content: "" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:16.194902Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:16.194912Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 11us result status StatusSuccess 2025-07-08T12:00:16.194963Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "ExternalTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "ExternalTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:16.195007Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:16.195019Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable1" took 13us result status StatusSuccess 2025-07-08T12:00:16.195049Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable1" PathDescription { Self { Name: "ExternalTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable1" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Content: "" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:16.195082Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:16.195092Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable2" took 12us result status StatusSuccess 2025-07-08T12:00:16.195124Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable2" PathDescription { Self { Name: "ExternalTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 
127 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable2" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false } Content: "" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |66.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith3CPUs [GOOD] |66.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsith1CPU [GOOD] >> Viewer::TabletMerging [GOOD] >> Viewer::StorageGroupOutputWithoutFilterNoDepends >> BasicUsage::ConflictingWrites [GOOD] >> BasicUsage::TWriteSession_WriteEncoded >> TBalanceCoverageBuilderTest::TestComplexSplit [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelReplaceExternalTableIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:128:2154] sender: [1:129:2058] recipient: [1:110:2142] 2025-07-08T12:00:16.167873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:00:16.167897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:16.167902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:00:16.167906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:00:16.167919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:00:16.167923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:16.167930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:16.167946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:00:16.168016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:16.210495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T12:00:16.210526Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:16.220068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:16.220118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:16.220165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:00:16.222473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:16.222581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:16.222702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:16.222801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:00:16.223440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:16.223481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:00:16.223708Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:16.223716Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:16.223733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:16.223740Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:16.223745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:00:16.223780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:00:16.227821Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2154] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T12:00:16.290443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:16.290529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:16.290603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:00:16.290653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose 
status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:16.290665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:16.293950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:16.293990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:00:16.294062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:16.294087Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:00:16.294092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:00:16.294097Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:00:16.298180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:16.298207Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:16.298215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:00:16.301742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:16.301762Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:16.301770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:16.301779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:00:16.302471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:16.309246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:00:16.309311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:00:16.309528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:16.309575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 
2025-07-08T12:00:16.309586Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:16.309688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:00:16.309698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:16.309735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:16.309750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:00:16.314025Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:16.314039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:16.314100Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:16.314107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:00:16.314156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:16.314165Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:00:16.314181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:16.314186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:16.314191Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:16.314193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:16.314198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:00:16.314204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:16.314209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:00:16.314213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:00:16.314234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:16.314240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:00:16.314244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:00:16.314822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:00:16.314837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Vers ... 
9Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 111: got EvNotifyTxCompletionResult 2025-07-08T12:00:16.427092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 111: satisfy waiter [1:424:2415] 2025-07-08T12:00:16.427109Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 116, at schemeshard: 72057594046678944 2025-07-08T12:00:16.427132Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 117, at schemeshard: 72057594046678944 2025-07-08T12:00:16.427143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2025-07-08T12:00:16.427146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:424:2415] 2025-07-08T12:00:16.427153Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 118, at schemeshard: 72057594046678944 2025-07-08T12:00:16.427168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 113: got EvNotifyTxCompletionResult 2025-07-08T12:00:16.427171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 113: satisfy waiter [1:424:2415] 2025-07-08T12:00:16.427187Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 119, at schemeshard: 72057594046678944 2025-07-08T12:00:16.427199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2025-07-08T12:00:16.427203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [1:424:2415] 2025-07-08T12:00:16.427209Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 120, at schemeshard: 72057594046678944 2025-07-08T12:00:16.427228Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 121, at schemeshard: 72057594046678944 2025-07-08T12:00:16.427243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 115: got EvNotifyTxCompletionResult 2025-07-08T12:00:16.427246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 115: satisfy waiter [1:424:2415] 2025-07-08T12:00:16.427258Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 122, at schemeshard: 72057594046678944 2025-07-08T12:00:16.427270Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 123, at schemeshard: 72057594046678944 2025-07-08T12:00:16.427280Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2025-07-08T12:00:16.427288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 116: got EvNotifyTxCompletionResult 2025-07-08T12:00:16.427291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 116: satisfy waiter [1:424:2415] 2025-07-08T12:00:16.427305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 117: got EvNotifyTxCompletionResult 2025-07-08T12:00:16.427309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 117: satisfy waiter [1:424:2415] 2025-07-08T12:00:16.427317Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2025-07-08T12:00:16.427327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 118: got 
EvNotifyTxCompletionResult 2025-07-08T12:00:16.427331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 118: satisfy waiter [1:424:2415] 2025-07-08T12:00:16.427343Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2025-07-08T12:00:16.427358Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2025-07-08T12:00:16.427367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 119: got EvNotifyTxCompletionResult 2025-07-08T12:00:16.427370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 119: satisfy waiter [1:424:2415] 2025-07-08T12:00:16.427383Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 128, at schemeshard: 72057594046678944 2025-07-08T12:00:16.427391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 120: got EvNotifyTxCompletionResult 2025-07-08T12:00:16.427394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 120: satisfy waiter [1:424:2415] 2025-07-08T12:00:16.427410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 121: got EvNotifyTxCompletionResult 2025-07-08T12:00:16.427414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 121: satisfy waiter [1:424:2415] 2025-07-08T12:00:16.427419Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2025-07-08T12:00:16.427435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 122: got EvNotifyTxCompletionResult 2025-07-08T12:00:16.427439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 122: satisfy waiter [1:424:2415] 2025-07-08T12:00:16.427444Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 130, at schemeshard: 72057594046678944 2025-07-08T12:00:16.427456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 123: got EvNotifyTxCompletionResult 2025-07-08T12:00:16.427459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 123: satisfy waiter [1:424:2415] 2025-07-08T12:00:16.427470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2025-07-08T12:00:16.427473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [1:424:2415] 2025-07-08T12:00:16.427490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-07-08T12:00:16.427493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:424:2415] 2025-07-08T12:00:16.427501Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 131, at schemeshard: 72057594046678944 2025-07-08T12:00:16.427514Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 132, at schemeshard: 72057594046678944 2025-07-08T12:00:16.427523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2025-07-08T12:00:16.427527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:424:2415] 2025-07-08T12:00:16.427542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2025-07-08T12:00:16.427545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:424:2415] 2025-07-08T12:00:16.427563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 128: got EvNotifyTxCompletionResult 2025-07-08T12:00:16.427566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 128: satisfy waiter [1:424:2415] 2025-07-08T12:00:16.427582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2025-07-08T12:00:16.427585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [1:424:2415] 2025-07-08T12:00:16.427602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 130: got EvNotifyTxCompletionResult 2025-07-08T12:00:16.427605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 130: satisfy waiter [1:424:2415] 2025-07-08T12:00:16.427632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 131: got EvNotifyTxCompletionResult 2025-07-08T12:00:16.427635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 131: satisfy waiter [1:424:2415] 2025-07-08T12:00:16.427652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 132: got EvNotifyTxCompletionResult 2025-07-08T12:00:16.427656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 132: satisfy waiter [1:424:2415] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestWaitNotification: OK eventTxId 105 TestWaitNotification: OK eventTxId 106 TestWaitNotification: OK eventTxId 107 TestWaitNotification: OK eventTxId 108 TestWaitNotification: OK eventTxId 109 TestWaitNotification: OK eventTxId 110 TestWaitNotification: OK eventTxId 111 TestWaitNotification: OK eventTxId 112 TestWaitNotification: OK eventTxId 113 TestWaitNotification: OK eventTxId 114 TestWaitNotification: OK eventTxId 115 TestWaitNotification: OK eventTxId 116 TestWaitNotification: OK eventTxId 117 TestWaitNotification: OK eventTxId 118 TestWaitNotification: OK eventTxId 119 TestWaitNotification: OK eventTxId 120 TestWaitNotification: OK eventTxId 121 TestWaitNotification: OK eventTxId 122 TestWaitNotification: OK eventTxId 123 TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 127 TestWaitNotification: OK eventTxId 128 TestWaitNotification: OK eventTxId 129 TestWaitNotification: OK eventTxId 130 TestWaitNotification: OK eventTxId 131 TestWaitNotification: OK eventTxId 132 2025-07-08T12:00:16.428089Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:16.428139Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 67us result status StatusSuccess 2025-07-08T12:00:16.428222Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable 
CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 2 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:128:2154] sender: [1:129:2058] recipient: [1:110:2142] 2025-07-08T12:00:16.016667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:00:16.016690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:16.016695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:00:16.016700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:00:16.016713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:00:16.016717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:16.016725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:16.016741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:00:16.016807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:16.036630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { 
AllExternalDataSourcesAreAvailable: true } 2025-07-08T12:00:16.036654Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:16.039997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:16.040037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:16.040062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:00:16.041449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:16.041544Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:16.041641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:16.041718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:00:16.042391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:16.042427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:00:16.042631Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:16.042639Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:16.042655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:16.042662Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:16.042667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:00:16.042697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:00:16.043782Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2154] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T12:00:16.061345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:16.061414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:16.061470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:00:16.061508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:16.061517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:16.062203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:16.062225Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:00:16.062269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:16.062286Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:00:16.062290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:00:16.062295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:00:16.062580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:16.062587Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:16.062591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:00:16.062848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:16.062855Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:16.062859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:16.062864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:00:16.063345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:16.063619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:00:16.063647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:00:16.063791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:16.063808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:16.063816Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:16.063872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:00:16.063878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:16.063901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:16.063910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:00:16.064206Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:16.064212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:16.064244Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:16.064248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:00:16.064279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:16.064284Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:00:16.064293Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:16.064296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:16.064300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:16.064303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:16.064307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:00:16.064311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:16.064315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:00:16.064318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:00:16.064327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:16.064331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:00:16.064335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:00:16.064668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:00:16.064680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Vers ... 
Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-07-08T12:00:16.375541Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T12:00:16.375548Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T12:00:16.375552Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 102 2025-07-08T12:00:16.375557Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-07-08T12:00:16.375560Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T12:00:16.375733Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T12:00:16.375741Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T12:00:16.375745Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-07-08T12:00:16.375748Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-07-08T12:00:16.375751Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T12:00:16.375826Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T12:00:16.375834Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T12:00:16.375837Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-07-08T12:00:16.375840Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-07-08T12:00:16.375843Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-07-08T12:00:16.375850Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-07-08T12:00:16.376093Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-07-08T12:00:16.376280Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-07-08T12:00:16.376292Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-07-08T12:00:16.376330Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-07-08T12:00:16.376335Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-07-08T12:00:16.376400Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-07-08T12:00:16.376412Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-07-08T12:00:16.376416Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:339:2330] TestWaitNotification: OK eventTxId 102 2025-07-08T12:00:16.376471Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:16.376492Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 28us result status StatusSuccess 2025-07-08T12:00:16.376551Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-07-08T12:00:16.377203Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:16.377244Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] 
CreateNewExternalTable, opId 103:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } 2025-07-08T12:00:16.377261Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 103:0, path# /MyRoot/ExternalTable 2025-07-08T12:00:16.377279Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/ExternalTable', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:132, at schemeshard: 72057594046678944 2025-07-08T12:00:16.377641Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:132" TxId: 103 SchemeshardId: 72057594046678944 PathId: 3 PathCreateTxId: 102, at schemeshard: 72057594046678944 2025-07-08T12:00:16.377669Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/ExternalTable', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:132, operation: CREATE EXTERNAL TABLE, path: /MyRoot/ExternalTable TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-07-08T12:00:16.377709Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-07-08T12:00:16.377714Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-07-08T12:00:16.377758Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-07-08T12:00:16.377770Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-07-08T12:00:16.377774Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:347:2338] TestWaitNotification: OK eventTxId 103 2025-07-08T12:00:16.377823Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:16.377841Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 22us result status StatusSuccess 2025-07-08T12:00:16.377891Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: 
"ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTest::ConfigColumnFamily [GOOD] >> TSchemeShardTest::ConsistentCopyAfterDropIndexes >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit [GOOD] |66.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithMergeBack [GOOD] |66.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeAll [GOOD] |66.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestOneSplit [GOOD] |66.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestZeroTracks [GOOD] |66.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestComplexSplit [GOOD] >> DataShardVolatile::DistributedWriteRSNotAckedBeforeCommit [GOOD] >> DataShardVolatile::DistributedUpsertRestartBeforePrepare+UseSink >> TxUsage::WriteToTopic_Demo_19_RestartNo >> TBalanceCoverageBuilderTest::TestSimpleSplit [GOOD] >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeOne [GOOD] >> TSchemeShardTest::DropPQAbort [GOOD] >> TSchemeShardTest::ManyDirs >> TBalanceCoverageBuilderTest::TestComplexSplitWithDuplicates [GOOD] >> AutoConfig::GetServicePoolsWith1CPU [GOOD] |66.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestEmpty [GOOD] >> TSchemeShardTest::ConsistentCopyAfterDropIndexes [GOOD] >> AutoConfig::GetServicePoolsWith2CPUs [GOOD] |66.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestComplexSplitWithDuplicates [GOOD] |66.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeOne [GOOD] |66.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSimpleSplit [GOOD] >> 
TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx5 [GOOD] >> AutoConfig::GetServicePoolsWith3CPUs [GOOD] >> AutoConfig::GetASPoolsWith2CPUs [GOOD] |66.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith1CPU [GOOD] |66.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest |66.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith2CPUs [GOOD] |66.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestEmpty [GOOD] >> AutoConfig::GetASPoolsWith4AndMoreCPUs [GOOD] >> DataShardVolatile::DistributedUpsertRestartBeforePrepare+UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartBeforePrepare-UseSink >> AutoConfig::GetServicePoolsWith4AndMoreCPUs [GOOD] |66.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest |66.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest |66.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureMirror3dcCount6Idx5 [GOOD] Test command err: iteration# 5 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 11 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 17 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 23 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 29 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 35 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 41 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 47 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 53 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 59 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 65 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 71 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 77 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 83 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 89 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 95 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 101 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 107 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 113 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 119 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 125 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 131 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 
iteration# 137 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 143 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 149 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 155 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 161 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 167 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 173 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 179 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 185 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 191 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 197 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 203 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 209 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 215 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 221 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 227 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 233 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 239 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 245 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 251 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 257 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 263 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 269 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 275 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 281 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 287 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 293 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 299 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 305 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 311 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 317 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 323 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 329 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 335 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 341 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 347 BlobsWritten# 490 blobsWrittenFull# 391 
blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 353 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 359 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 365 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 371 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 377 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 383 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 389 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 395 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 401 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 407 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 413 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 419 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 425 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 431 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 437 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 443 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 449 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 455 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 461 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 467 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 473 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 479 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 iteration# 485 BlobsWritten# 490 blobsWrittenFull# 391 blobsWrittenAlmostFull# 99 blobsUnwritten# 0 |66.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith2CPUs [GOOD] |66.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith3CPUs [GOOD] |66.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith4AndMoreCPUs [GOOD] |66.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith4AndMoreCPUs [GOOD] |66.8%| [TA] $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |66.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::ConsistentCopyAfterDropIndexes [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T12:00:03.744183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:00:03.744217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:03.744222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:00:03.744227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:00:03.744240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:00:03.744244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:03.744251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:03.744263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:00:03.744327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:03.773258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:00:03.773277Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:03.776659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:03.776699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:03.776737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:00:03.781958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:03.782019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:03.782099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:03.782238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:00:03.782901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:03.782941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:00:03.783146Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:03.783154Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:03.783168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:03.783175Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:03.783180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:00:03.783202Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:00:03.784249Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T12:00:03.821912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:03.821974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:03.822020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:00:03.822064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:03.822074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:03.829197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:03.829226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:00:03.829262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:03.829271Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:00:03.829276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:00:03.829284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:00:03.829682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:03.829691Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:03.829696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:00:03.829957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:03.829964Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2025-07-08T12:00:03.829969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:03.829976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:00:03.830641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:03.830971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:00:03.830999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:00:03.831153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:03.831174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:03.831182Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:03.831245Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:00:03.831254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:03.831280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:03.831290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:00:03.831680Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:03.831689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:03.831727Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:03.831732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:00:03.831742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:03.831750Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:00:03.831760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:03.831764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 
ready parts: 1/1 2025-07-08T12:00:03.831769Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:03.831772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:03.831776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:00:03.831781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:03.831785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:00:03.831789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:00:03.831798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:03.831802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:00:03.831807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:00:03.832235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:00:03.832254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 3" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "Table1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCopying Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:18.273643Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:18.273671Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table1" took 31us result status StatusSuccess 2025-07-08T12:00:18.273754Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table1" PathDescription { Self { Name: "Table1" PathId: 2 
SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:18.273824Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Copy1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:18.273844Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Copy1" took 22us result status StatusSuccess 2025-07-08T12:00:18.273906Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Copy1" PathDescription { Self { Name: "Copy1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000007 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Copy1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Sync" LocalPathId: 6 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 
RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:18.273966Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Copy2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:18.273980Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Copy2" took 16us result status StatusSuccess 2025-07-08T12:00:18.274035Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Copy2" PathDescription { Self { Name: "Copy2" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Copy2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { 
ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:18.274089Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Copy3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:18.274115Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Copy3" took 28us result status StatusSuccess 2025-07-08T12:00:18.274169Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Copy3" PathDescription { Self { Name: "Copy3" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Copy3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Sync" LocalPathId: 10 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 9 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> Viewer::StorageGroupOutputWithoutFilterNoDepends [GOOD] >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnVDiskSpaceStatus >> TProxyActorTest::TestCreateSemaphoreInterrupted >> TProxyActorTest::TestDisconnectWhileAttaching >> DataShardVolatile::DistributedUpsertRestartBeforePrepare-UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPrepare+UseSink |66.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest |66.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest |66.9%| [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/core/kesus/proxy/ut/unittest |66.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest |66.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest |66.9%| [TA] $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD] >> TProxyActorTest::TestCreateSemaphore |66.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestAttachSession ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadGroupBy [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=8328;columns=19; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8328;columns=19; -- group by key: 0 2025-07-08T11:59:00.948382Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:59:00.952524Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:59:00.952585Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:59:00.953283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:59:00.953360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:59:00.953395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:59:00.953417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:59:00.953433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:59:00.953451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:59:00.953468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:59:00.953485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:59:00.953501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:59:00.953517Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:59:00.953534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:59:00.953553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:59:00.959345Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:59:00.959530Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:59:00.959541Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:59:00.959573Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:00.959643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:59:00.959656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:59:00.959661Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:59:00.959670Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:59:00.959678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:59:00.959684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:59:00.959689Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:59:00.959705Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:00.959712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:59:00.959719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:59:00.959723Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:59:00.959731Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:59:00.959737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:59:00.959744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:59:00.959748Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:59:00.959756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:59:00.959763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:59:00.959767Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:59:00.959805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:59:00.959812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:59:00.959816Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:59:00.959836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:59:00.959843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:59:00.959848Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:59:00.959860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:59:00.959867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:59:00.959871Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:59:00.959877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:59:00.959884Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:59:00.959890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:59:00.959894Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:59:00.959930Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=10; 2025-07-08T11:59:00.959938Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=5; 2025-07-08T11:59:00.959946Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2025-07-08T11:59:00.959956Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=6; 2025-07-08T11:59:00.959966Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:59:00.959977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:59:00.959984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:59:00.959989Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:59:00.960001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSc ... 
NSHARD_SCAN DEBUG: SelfId=[54:413:2430];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:88;event=TEvTaskProcessedResult; 2025-07-08T12:00:16.035537Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:413:2430];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=merge.cpp:75;event=DoApply;interval_idx=0; 2025-07-08T12:00:16.035543Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:413:2430];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2052; 2025-07-08T12:00:16.035556Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:413:2430];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=1;merger=0;interval_id=2052; 2025-07-08T12:00:16.035562Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:413:2430];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-07-08T12:00:16.035574Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:413:2430];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-07-08T12:00:16.035579Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:413:2430];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=1;count=1;finished=1; 2025-07-08T12:00:16.035584Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:413:2430];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:207;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-07-08T12:00:16.035684Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:413:2430];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T12:00:16.035701Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:413:2430];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:1;records_count:1;schema=100: binary 101: binary 102: binary 103: uint64;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-07-08T12:00:16.035707Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:413:2430];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:49;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-07-08T12:00:16.035717Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:413:2430];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:238;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;);columns=4;rows=1; 2025-07-08T12:00:16.035727Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:413:2430];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:258;stage=data_format;batch_size=26;num_rows=1;batch_columns=100,101,102,103; 2025-07-08T12:00:16.035767Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:413:2430];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:374;event=send_data;compute_actor_id=[54:412:2429];bytes=26;rows=1;faults=0;finished=0;fault=0;schema=100: binary 101: binary 102: binary 103: uint64; 2025-07-08T12:00:16.035780Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:413:2430];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:278;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-07-08T12:00:16.035791Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:413:2430];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-07-08T12:00:16.035802Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:413:2430];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-07-08T12:00:16.035830Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:413:2430];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:107;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-07-08T12:00:16.035839Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:413:2430];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:196;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-07-08T12:00:16.035848Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:413:2430];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:201;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-07-08T12:00:16.035853Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: Scan [54:413:2430] finished for tablet 9437184 2025-07-08T12:00:16.035915Z node 54 :TX_COLUMNSHARD_SCAN INFO: SelfId=[54:413:2430];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:425;event=scan_finish;compute_actor_id=[54:412:2429];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_task_result"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1751976016033687,"name":"_full_task","f":1751976016033687,"d_finished":0,"c":0,"l":1751976016035861,"d":2174},"events":[{"name":"bootstrap","f":1751976016033759,"d_finished":416,"c":1,"l":1751976016034175,"d":416},{"a":1751976016035828,"name":"ack","f":1751976016035679,"d_finished":126,"c":1,"l":1751976016035805,"d":159},{"a":1751976016035826,"name":"processing","f":1751976016034384,"d_finished":1028,"c":10,"l":1751976016035805,"d":1063},{"name":"ProduceResults","f":1751976016034018,"d_finished":312,"c":13,"l":1751976016035850,"d":312},{"a":1751976016035851,"name":"Finish","f":1751976016035851,"d_finished":0,"c":0,"l":1751976016035861,"d":10},{"name":"task_result","f":1751976016034388,"d_finished":880,"c":9,"l":1751976016035593,"d":880}],"id":"9437184::2052"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-07-08T12:00:16.035926Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:413:2430];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:374;event=send_data;compute_actor_id=[54:412:2429];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-07-08T12:00:16.035964Z node 54 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[54:413:2430];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:379;event=scan_finished;compute_actor_id=[54:412:2429];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0},{"events":["f_ack","l_task_result"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1751976016033687,"name":"_full_task","f":1751976016033687,"d_finished":0,"c":0,"l":1751976016035931,"d":2244},"events":[{"name":"bootstrap","f":1751976016033759,"d_finished":416,"c":1,"l":1751976016034175,"d":416},{"a":1751976016035828,"name":"ack","f":1751976016035679,"d_finished":126,"c":1,"l":1751976016035805,"d":229},{"a":1751976016035826,"name":"processing","f":1751976016034384,"d_finished":1028,"c":10,"l":1751976016035805,"d":1133},{"name":"ProduceResults","f":1751976016034018,"d_finished":312,"c":13,"l":1751976016035850,"d":312},{"a":1751976016035851,"name":"Finish","f":1751976016035851,"d_finished":0,"c":0,"l":1751976016035931,"d":80},{"name":"task_result","f":1751976016034388,"d_finished":880,"c":9,"l":1751976016035593,"d":880}],"id":"9437184::2052"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-07-08T12:00:16.035975Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:413:2430];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-07-08T12:00:16.033538Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=4;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=14056;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=14056;selected_rows=0; 2025-07-08T12:00:16.035981Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:413:2430];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:192;event=scan_aborted;reason=unexpected on destructor; 2025-07-08T12:00:16.036016Z node 54 :TX_COLUMNSHARD_SCAN INFO: SelfId=[54:413:2430];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;; |66.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest |66.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest >> BSCReadOnlyPDisk::ReadOnlyNotAllowed |66.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest |66.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestDisconnectWhileAttaching [GOOD] >> TProxyActorTest::TestCreateSemaphore [GOOD] |66.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestAttachSession [GOOD] |66.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD] >> 
DataShardVolatile::DistributedUpsertRestartAfterPrepare+UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPrepare-UseSink >> BSCReadOnlyPDisk::SetGoodDiskInBrokenGroupReadOnlyNotAllowed >> TxUsage::Sinks_Oltp_WriteToTopics_3 [GOOD] >> TSchemeShardCheckProposeSize::CopyTables [GOOD] >> TSchemeShardDecimalTypesInTables::Parameterless |66.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |66.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::SetBrokenDiskInBrokenGroupReadOnly >> Viewer::TenantInfo5kkTablets [GOOD] >> Viewer::UseTransactionWhenExecuteDataActionQuery >> BSCReadOnlyPDisk::SetGoodDiskInBrokenGroupReadOnlyNotAllowed [GOOD] |66.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphore [GOOD] |66.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestAttachSession [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestDisconnectWhileAttaching [GOOD] Test command err: ... waiting for blocked registrations ... blocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from KESUS_PROXY_ACTOR to KESUS_TABLET_ACTOR ... waiting for blocked registrations (done) 2025-07-08T12:00:20.639576Z node 1 :PIPE_SERVER ERROR: [72057594037927937] NodeDisconnected NodeId# 2 ... unblocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from KESUS_PROXY_ACTOR to KESUS_TABLET_ACTOR >> TSchemeShardDecimalTypesInTables::Parameterless [GOOD] >> BSCReadOnlyPDisk::SetBrokenDiskInBrokenGroupReadOnly [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-false >> BSCReadOnlyPDisk::ReadOnlySlay >> TExternalTableTest::ParallelCreateSameExternalTable >> TxUsage::Sinks_Oltp_WriteToTopics_4 >> BSCReadOnlyPDisk::RestartAndReadOnlyConsecutive >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnVDiskSpaceStatus [GOOD] >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnUsage >> BSCReadOnlyPDisk::ReadOnlyOneByOne ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::SetGoodDiskInBrokenGroupReadOnlyNotAllowed [GOOD] Test command err: RandomSeed# 11728437566587787648 2025-07-08T12:00:22.026347Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:0:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-07-08T12:00:22.026385Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:1:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-07-08T12:00:22.026396Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:2:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-07-08T12:00:22.026407Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:3:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-07-08T12:00:22.026418Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:4:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-07-08T12:00:22.026428Z 6 00h00m30.010512s 
:BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:5:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-07-08T12:00:22.026438Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:6:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-07-08T12:00:22.026603Z 1 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-07-08T12:00:22.026623Z 2 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-07-08T12:00:22.026635Z 3 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-07-08T12:00:22.026646Z 4 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-07-08T12:00:22.026657Z 5 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-07-08T12:00:22.026668Z 6 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-07-08T12:00:22.026679Z 7 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-07-08T12:00:22.026691Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:0:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-07-08T12:00:22.026697Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:5:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-07-08T12:00:22.026701Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:6:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-07-08T12:00:22.026713Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:1:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-07-08T12:00:22.026717Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:2:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-07-08T12:00:22.026722Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:3:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-07-08T12:00:22.026730Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:4:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-07-08T12:00:22.027039Z 1 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-07-08T12:00:22.027050Z 6 00h00m30.010512s 
:BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-07-08T12:00:22.027057Z 7 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-07-08T12:00:22.027066Z 2 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-07-08T12:00:22.027074Z 3 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-07-08T12:00:22.027082Z 4 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-07-08T12:00:22.027089Z 5 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 >> TExternalTableTest::ParallelCreateSameExternalTable [GOOD] >> TExternalTableTest::SchemeErrors >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-false [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-true ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::SetBrokenDiskInBrokenGroupReadOnly [GOOD] Test command err: RandomSeed# 9544297498508397788 2025-07-08T12:00:22.221269Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:0:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-07-08T12:00:22.221307Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:1:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-07-08T12:00:22.221320Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:2:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-07-08T12:00:22.221331Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:3:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-07-08T12:00:22.221342Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:4:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-07-08T12:00:22.221353Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:5:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-07-08T12:00:22.221364Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:6:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-07-08T12:00:22.221374Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:7:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-07-08T12:00:22.221554Z 1 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, 
PDiskError# Some error reason Marker# BSVSF03 2025-07-08T12:00:22.221574Z 2 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-07-08T12:00:22.221588Z 3 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-07-08T12:00:22.221599Z 4 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-07-08T12:00:22.221610Z 5 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-07-08T12:00:22.221621Z 6 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-07-08T12:00:22.221632Z 7 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-07-08T12:00:22.221643Z 8 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-07-08T12:00:22.221658Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:0:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-07-08T12:00:22.221664Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:5:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-07-08T12:00:22.221669Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:6:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-07-08T12:00:22.221674Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:7:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-07-08T12:00:22.221681Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:1:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-07-08T12:00:22.221687Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:2:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-07-08T12:00:22.221692Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:3:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-07-08T12:00:22.221697Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: VDISK[82000000:_:0:4:0]: CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-07-08T12:00:22.222060Z 1 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-07-08T12:00:22.222077Z 6 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some 
error reason Marker# BSVSF03 2025-07-08T12:00:22.222086Z 7 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-07-08T12:00:22.222096Z 8 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-07-08T12:00:22.222104Z 2 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-07-08T12:00:22.222114Z 3 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-07-08T12:00:22.222124Z 4 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-07-08T12:00:22.222133Z 5 00h00m30.010512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-07-08T12:00:22.243504Z 1 00h01m30.011024s :BS_LOCALRECOVERY CRIT: VDISK[82000000:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "Some error reason" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> TExternalTableTest::ReplaceExternalTableIfNotExists >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet >> TExternalTableTest::SchemeErrors [GOOD] >> TExternalTableTest::DropExternalTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateSameExternalTable [GOOD] Test command 
err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:128:2154] sender: [1:129:2058] recipient: [1:110:2142] 2025-07-08T12:00:22.471844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:00:22.471866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:22.471871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:00:22.471876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:00:22.471890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:00:22.471893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:22.471901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:22.471918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:00:22.471985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:22.484903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T12:00:22.484924Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:22.488061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:22.488090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:22.488114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:00:22.489239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:22.489319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:22.489414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:22.489483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:00:22.490068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:22.490100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:00:22.490289Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:22.490296Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:22.490311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:22.490317Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:22.490323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:00:22.490350Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:00:22.491298Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2154] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T12:00:22.508850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:22.508910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:22.508998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:00:22.509039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:22.509049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:22.509635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:22.509654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:00:22.509697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:22.509712Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:00:22.509716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:00:22.509720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:00:22.510000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:22.510008Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:22.510012Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:00:22.510248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:22.510255Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:22.510260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:22.510265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:00:22.510793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:22.511072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:00:22.511101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:00:22.511245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:22.511263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:22.511269Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:22.511330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:00:22.511337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:22.511358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:22.511371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:00:22.511691Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:22.511697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:22.511729Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:22.511734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:00:22.511766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:22.511772Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:00:22.511781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:22.511784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:22.511789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:22.511792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready 
parts: 1/1 2025-07-08T12:00:22.511796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:00:22.511800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:22.511804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:00:22.511807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:00:22.511816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:22.511821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:00:22.511824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:00:22.512164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:00:22.512176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Vers ... ode 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 125 2025-07-08T12:00:22.522929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 125 2025-07-08T12:00:22.523112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 125 TestModificationResult got TxId: 125, wait until txId: 125 TestModificationResults wait txId: 126 TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 TestModificationResult got TxId: 127, wait until txId: 127 2025-07-08T12:00:22.523187Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:22.523209Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 26us result status StatusSuccess 2025-07-08T12:00:22.523265Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 
0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:22.523311Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:22.523321Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 12us result status StatusSuccess 2025-07-08T12:00:22.523357Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 125 2025-07-08T12:00:22.523387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: send EvNotifyTxCompletion 2025-07-08T12:00:22.523391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 125 TestWaitNotification wait txId: 126 2025-07-08T12:00:22.523404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: send EvNotifyTxCompletion 2025-07-08T12:00:22.523407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 126 TestWaitNotification wait txId: 127 2025-07-08T12:00:22.523415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: send EvNotifyTxCompletion 2025-07-08T12:00:22.523418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, 
SendToSchemeshard, txId 127 2025-07-08T12:00:22.523479Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2025-07-08T12:00:22.523493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2025-07-08T12:00:22.523497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:346:2337] 2025-07-08T12:00:22.523514Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2025-07-08T12:00:22.523524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-07-08T12:00:22.523527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:346:2337] 2025-07-08T12:00:22.523533Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2025-07-08T12:00:22.523543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2025-07-08T12:00:22.523546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:346:2337] TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 2025-07-08T12:00:22.523600Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:22.523612Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 13us result status StatusSuccess 2025-07-08T12:00:22.523647Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 
TestModificationResults wait txId: 128 2025-07-08T12:00:22.524189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "NilNoviSubLuna" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:22.524218Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "NilNoviSubLuna" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } 2025-07-08T12:00:22.524225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 128:0, path# /MyRoot/NilNoviSubLuna 2025-07-08T12:00:22.524238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:132, at schemeshard: 72057594046678944 2025-07-08T12:00:22.524577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/NilNoviSubLuna\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:132" TxId: 128 SchemeshardId: 72057594046678944 PathId: 3 PathCreateTxId: 125, at schemeshard: 72057594046678944 2025-07-08T12:00:22.524596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:132, operation: CREATE EXTERNAL TABLE, path: /MyRoot/NilNoviSubLuna TestModificationResult got TxId: 128, wait until txId: 128 >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-true [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-false >> Viewer::UseTransactionWhenExecuteDataActionQuery [GOOD] >> ViewerTopicDataTests::TopicDataTest >> BSCReadOnlyPDisk::ReadOnlyNotAllowed [GOOD] >> Viewer::JsonStorageListingV1GroupIdFilter [GOOD] >> Viewer::JsonStorageListingV1NodeIdFilter >> DataShardVolatile::DistributedUpsertRestartAfterPrepare-UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPlan >> TSchemeShardTest::CopyTableForBackup [GOOD] >> TSchemeShardTest::CreateIndexedTableAfterBackup >> TExternalTableTest::DropExternalTable [GOOD] >> TExternalTableTest::Decimal >> TExternalTableTest::ReplaceExternalTableIfNotExists [GOOD] |66.9%| [TA] 
$(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] >> TS3WrapperTests::UploadUnknownPart >> TExternalTableTest::ReadOnlyMode >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-false [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-true >> Viewer::JsonStorageListingV2GroupIdFilter [GOOD] >> Viewer::JsonStorageListingV2NodeIdFilter ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:128:2154] sender: [1:129:2058] recipient: [1:110:2142] 2025-07-08T12:00:22.824082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:00:22.824106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:22.824111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:00:22.824115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:00:22.824129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:00:22.824133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:22.824141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:22.824157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:00:22.824226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:22.836798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T12:00:22.836820Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:22.840501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:22.840545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:22.840570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:00:22.841992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:22.842086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:22.842192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:22.842278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:00:22.842949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:22.842985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:00:22.843199Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:22.843210Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:22.843228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:22.843234Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:22.843240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:00:22.843270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:00:22.844382Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2154] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T12:00:22.864036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:22.864101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:22.864156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:00:22.864201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:22.864211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:22.864897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:22.864923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:00:22.864992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:22.865013Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:00:22.865017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:00:22.865023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:00:22.865442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:22.865454Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:22.865459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:00:22.865806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:22.865816Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:22.865822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:22.865828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:00:22.866380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:22.866758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:00:22.866793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:00:22.866959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:22.866981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:22.866988Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:22.867060Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:00:22.867067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:22.867091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:22.867103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:00:22.867503Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:22.867511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:22.867544Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:22.867550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 
72057594046678944, txId: 1, path id: 1 2025-07-08T12:00:22.867586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:22.867592Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:00:22.867602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:22.867606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:22.867611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:22.867614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:22.867618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:00:22.867624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:22.867628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:00:22.867632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:00:22.867642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:22.867647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:00:22.867651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:00:22.868028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:00:22.868046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Vers ... 
_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 126 2025-07-08T12:00:22.879649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "BlaBlaType" } } } TxId: 126 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:22.879716Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 126:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "BlaBlaType" } } 2025-07-08T12:00:22.879728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 126:0, path# /MyRoot/DirA/Table2 2025-07-08T12:00:22.879773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 126:1, propose status:StatusSchemeError, reason: Type 'BlaBlaType' specified for column 'RowId' is not supported by storage, at schemeshard: 72057594046678944 2025-07-08T12:00:22.880169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Type \'BlaBlaType\' specified for column \'RowId\' is not supported by storage" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:22.880196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Type 'BlaBlaType' specified for column 'RowId' is not supported by storage, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2025-07-08T12:00:22.880754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "" Type: "Uint64" } } } TxId: 127 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:22.880797Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 127:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "" Type: "Uint64" } } 2025-07-08T12:00:22.880807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 127:0, path# /MyRoot/DirA/Table2 2025-07-08T12:00:22.880819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Columns cannot have an empty name, at schemeshard: 72057594046678944 2025-07-08T12:00:22.881268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Columns cannot have an empty name" TxId: 127 SchemeshardId: 72057594046678944, 
at schemeshard: 72057594046678944 2025-07-08T12:00:22.881291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Columns cannot have an empty name, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 127, wait until txId: 127 TestModificationResults wait txId: 128 2025-07-08T12:00:22.881784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" TypeId: 27 } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:22.881828Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" TypeId: 27 } } 2025-07-08T12:00:22.881838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 128:0, path# /MyRoot/DirA/Table2 2025-07-08T12:00:22.881852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusSchemeError, reason: Cannot set TypeId for column 'RowId', use Type, at schemeshard: 72057594046678944 2025-07-08T12:00:22.882235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusSchemeError Reason: "Cannot set TypeId for column \'RowId\', use Type" TxId: 128 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:22.882257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusSchemeError, reason: Cannot set TypeId for column 'RowId', use Type, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 128, wait until txId: 128 TestModificationResults wait txId: 129 2025-07-08T12:00:22.882737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" } } } TxId: 129 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:22.882774Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 129:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" } } 2025-07-08T12:00:22.882783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 129:0, path# /MyRoot/DirA/Table2 2025-07-08T12:00:22.882796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 129:1, propose status:StatusSchemeError, reason: Missing Type for column 'RowId', at schemeshard: 72057594046678944 2025-07-08T12:00:22.883175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 129, response: Status: StatusSchemeError Reason: "Missing 
Type for column \'RowId\'" TxId: 129 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:22.883198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusSchemeError, reason: Missing Type for column 'RowId', operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 129, wait until txId: 129 TestModificationResults wait txId: 130 2025-07-08T12:00:22.883709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" Id: 2 } Columns { Name: "RowId2" Type: "Uint64" Id: 2 } } } TxId: 130 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:22.883753Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 130:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" Id: 2 } Columns { Name: "RowId2" Type: "Uint64" Id: 2 } } 2025-07-08T12:00:22.883763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 130:0, path# /MyRoot/DirA/Table2 2025-07-08T12:00:22.883788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 130:1, propose status:StatusSchemeError, reason: Duplicate column id: 2, at schemeshard: 72057594046678944 2025-07-08T12:00:22.884160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 130, response: Status: StatusSchemeError Reason: "Duplicate column id: 2" TxId: 130 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:22.884181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 130, database: /MyRoot, subject: , status: StatusSchemeError, reason: Duplicate column id: 2, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 130, wait until txId: 130 TestModificationResults wait txId: 131 2025-07-08T12:00:22.884675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource1" Location: "/" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } } } TxId: 131 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:22.884718Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 131:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource1" Location: "/" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } } 2025-07-08T12:00:22.884727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 131:0, path# /MyRoot/DirA/Table2 2025-07-08T12:00:22.884744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 131:1, propose status:StatusPathDoesNotExist, 
reason: Check failed: path: '/MyRoot/ExternalDataSource1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:166, at schemeshard: 72057594046678944 2025-07-08T12:00:22.885131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 131, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:166" TxId: 131 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:22.885153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 131, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ExternalDataSource1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:166, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 131, wait until txId: 131 |67.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CompleteUnknownUpload >> TS3WrapperTests::UploadUnknownPart [GOOD] >> TExternalTableTest::Decimal [GOOD] >> BSCReadOnlyPDisk::ReadOnlySlay [GOOD] >> TS3WrapperTests::HeadUnknownObject |67.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlyNotAllowed [GOOD] Test command err: RandomSeed# 693542184718362139 |67.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest |67.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-true [GOOD] >> TSchemeShardDecimalTypesInTables::CopyTableShouldNotFailOnDisabledFeatureFlag >> TS3WrapperTests::HeadUnknownObject [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:128:2154] sender: [1:129:2058] recipient: [1:110:2142] 2025-07-08T12:00:23.091527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:00:23.091547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:23.091552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:00:23.091556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 
2025-07-08T12:00:23.091570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:00:23.091573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:23.091581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:23.091594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:00:23.091664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:23.105731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T12:00:23.105750Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:23.109076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:23.109104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:23.109130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:00:23.112174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:23.112295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:23.112398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:23.112485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:00:23.113254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:23.113289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:00:23.113496Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:23.113505Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:23.113522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:23.113528Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:23.113534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:00:23.113568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.114636Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2154] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T12:00:23.154235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:23.154299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.154354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:00:23.154393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:23.154402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.159131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:23.159166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:00:23.159227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.159247Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:00:23.159253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:00:23.159258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:00:23.159768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.159778Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:23.159784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:00:23.160109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.160117Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.160123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:23.160129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:00:23.160926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:23.161352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:00:23.161387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:00:23.161542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, 
transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:23.161562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:23.161568Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:23.161639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:00:23.161646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:23.161671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:23.161682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:00:23.162071Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:23.162078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:23.162113Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:23.162118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:00:23.162152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.162158Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:00:23.162168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:23.162173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:23.162178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:23.162181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:23.162188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:00:23.162193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:23.162197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:00:23.162201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:00:23.162211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:23.162216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:00:23.162221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:00:23.162627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: 
Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:00:23.162644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Vers ... 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-07-08T12:00:23.178251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T12:00:23.178375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-07-08T12:00:23.178385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-07-08T12:00:23.178390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-07-08T12:00:23.178394Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-07-08T12:00:23.178397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-07-08T12:00:23.178407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-07-08T12:00:23.178826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-07-08T12:00:23.178846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2025-07-08T12:00:23.178983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:23.178999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:23.179006Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TAlterExternalTable TPropose, operationId: 104:0 HandleReply TEvOperationPlan: step# 5000005 2025-07-08T12:00:23.179021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-07-08T12:00:23.179040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:23.179047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T12:00:23.179154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-07-08T12:00:23.179438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 FAKE_COORDINATOR: Erasing txId 104 2025-07-08T12:00:23.179657Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:23.179664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:23.179683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T12:00:23.179692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T12:00:23.179705Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:23.179710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-07-08T12:00:23.179715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-07-08T12:00:23.179719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-07-08T12:00:23.179756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.179762Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-07-08T12:00:23.179770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-07-08T12:00:23.179773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-07-08T12:00:23.179777Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-07-08T12:00:23.179780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-07-08T12:00:23.179784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-07-08T12:00:23.179788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-07-08T12:00:23.179792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-07-08T12:00:23.179795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-07-08T12:00:23.179804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-07-08T12:00:23.179807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-07-08T12:00:23.179811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-07-08T12:00:23.179815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-07-08T12:00:23.179818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 4 2025-07-08T12:00:23.179915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle 
TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-07-08T12:00:23.179926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-07-08T12:00:23.179931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-07-08T12:00:23.179935Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-07-08T12:00:23.179938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T12:00:23.180049Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-07-08T12:00:23.180059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-07-08T12:00:23.180066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-07-08T12:00:23.180070Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-07-08T12:00:23.180073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T12:00:23.180082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-07-08T12:00:23.180539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-07-08T12:00:23.180769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-07-08T12:00:23.180845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-07-08T12:00:23.180851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-07-08T12:00:23.180903Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-07-08T12:00:23.180917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-07-08T12:00:23.180920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:390:2381] TestWaitNotification: OK eventTxId 104 2025-07-08T12:00:23.181010Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:23.181038Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 
72057594046678944 describe path "/MyRoot/ExternalTable" took 33us result status StatusSuccess 2025-07-08T12:00:23.181086Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 3 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 3 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/other_location" Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:128:2154] sender: [1:129:2058] recipient: [1:110:2142] 2025-07-08T12:00:23.120184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:00:23.120207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:23.120213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:00:23.120217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:00:23.120233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:00:23.120238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:23.120247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:23.120269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 
604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:00:23.120341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:23.145424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T12:00:23.145443Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:23.150208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:23.150234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:23.150263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:00:23.151545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:23.151637Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:23.151746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:23.151819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:00:23.153652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:23.153693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:00:23.153916Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:23.153925Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:23.153943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:23.153950Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:23.153955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:00:23.153987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.155189Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2154] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T12:00:23.179892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:23.179941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.179989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:00:23.180024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:23.180032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at 
schemeshard: 72057594046678944 2025-07-08T12:00:23.180609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:23.180635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:00:23.180678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.180698Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:00:23.180702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:00:23.180707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:00:23.181115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.181128Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:23.181133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:00:23.181481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.181492Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.181498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:23.181504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:00:23.182281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:23.182644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:00:23.182678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:00:23.182836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:23.182858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:23.182867Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:23.182940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 
2025-07-08T12:00:23.182947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:23.182969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:23.182980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:00:23.183341Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:23.183348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:23.183381Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:23.183385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:00:23.183419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.183425Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:00:23.183435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:23.183439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:23.183443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:23.183446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:23.183450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:00:23.183455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:23.183459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:00:23.183463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:00:23.183473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:23.183478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:00:23.183482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:00:23.183839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:00:23.183855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Vers ... 
shToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:23.189477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-07-08T12:00:23.189485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-07-08T12:00:23.189490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-07-08T12:00:23.189549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.189556Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-07-08T12:00:23.189565Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-07-08T12:00:23.189569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-07-08T12:00:23.189575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-07-08T12:00:23.189583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-07-08T12:00:23.189589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-07-08T12:00:23.189597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-07-08T12:00:23.189606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-07-08T12:00:23.189612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-07-08T12:00:23.189627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-07-08T12:00:23.189634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-07-08T12:00:23.189638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-07-08T12:00:23.189641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-07-08T12:00:23.189821Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-07-08T12:00:23.189840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-07-08T12:00:23.189845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-07-08T12:00:23.189849Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-07-08T12:00:23.189854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:23.190184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 
72057594046678944, cookie: 101 2025-07-08T12:00:23.190199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-07-08T12:00:23.190203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-07-08T12:00:23.190207Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-07-08T12:00:23.190211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-07-08T12:00:23.190225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-07-08T12:00:23.190640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-07-08T12:00:23.190853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-07-08T12:00:23.190905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-07-08T12:00:23.190912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-07-08T12:00:23.190982Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-07-08T12:00:23.191001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-07-08T12:00:23.191007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:306:2297] TestWaitNotification: OK eventTxId 101 2025-07-08T12:00:23.191070Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:23.191097Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 36us result status StatusSuccess 2025-07-08T12:00:23.191198Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 
72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-07-08T12:00:23.192136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:23.192210Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } 2025-07-08T12:00:23.192225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 102:0, explain: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-07-08T12:00:23.192233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusPreconditionFailed, reason: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-07-08T12:00:23.192861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusPreconditionFailed Reason: "Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:23.192903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, operation: CREATE EXTERNAL TABLE, path: /MyRoot/ExternalTable TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-07-08T12:00:23.192985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-07-08T12:00:23.192994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-07-08T12:00:23.193074Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-07-08T12:00:23.193096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-07-08T12:00:23.193103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:314:2305] TestWaitNotification: OK eventTxId 102 2025-07-08T12:00:23.193186Z node 
1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:23.193219Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 38us result status StatusPathDoesNotExist 2025-07-08T12:00:23.193263Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TExternalTableTest::ReadOnlyMode [GOOD] >> TS3WrapperTests::CompleteUnknownUpload [GOOD] >> TS3WrapperTests::MultipartUpload >> BSCReadOnlyPDisk::RestartAndReadOnlyConsecutive [GOOD] >> TS3WrapperTests::MultipartUpload [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadUnknownObject [GOOD] Test command err: 2025-07-08T12:00:23.657148Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 1778E9D3-EFEB-44CC-BF62-2A97984944AD, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:6385 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7225D7BC-7828-441D-95CA-2E7F6E043E55 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 2025-07-08T12:00:23.658598Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 1778E9D3-EFEB-44CC-BF62-2A97984944AD, response# No response body. 
>> TSchemeShardTest::CreateIndexedTableAfterBackup [GOOD] >> TSchemeShardTest::CreateFinishedInDescription >> TSchemeShardDecimalTypesInTables::CopyTableShouldNotFailOnDisabledFeatureFlag [GOOD] >> TSchemeShardDecimalTypesInTables::CreateWithWrongParameters ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlySlay [GOOD] Test command err: RandomSeed# 13620512001434941112 2025-07-08T12:00:22.585651Z 1 00h01m14.361536s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-07-08T12:00:22.585978Z 1 00h01m14.361536s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 10560025873039007611] 2025-07-08T12:00:22.587061Z 1 00h01m14.361536s :BS_SYNCER ERROR: VDISK[82000000:_:0:0:0]: THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 >> TSchemeShardTest::CreateFinishedInDescription [GOOD] >> TSchemeShardTest::CreateBlockStoreVolume ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CompleteUnknownUpload [GOOD] Test command err: 2025-07-08T12:00:23.645222Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 3FB38FF5-4006-4AA0-BBB9-D29FFCB5DF69, request# CompleteMultipartUpload { Bucket: TEST Key: key UploadId: uploadId MultipartUpload: { Parts: [ETag] } } REQUEST: POST /TEST/key?uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:13924 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0C9DBB7A-110C-4052-B198-C766E276709D amz-sdk-request: attempt=1 content-length: 207 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key / uploadId=uploadId 2025-07-08T12:00:23.646551Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 3FB38FF5-4006-4AA0-BBB9-D29FFCB5DF69, response# ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:128:2154] sender: [1:129:2058] recipient: [1:110:2142] 2025-07-08T12:00:23.643961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:00:23.643987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:23.643992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:00:23.643996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:00:23.644011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:00:23.644015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:23.644024Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:23.644040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:00:23.644120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:23.658608Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T12:00:23.658632Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:23.662895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:23.662934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:23.662967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:00:23.664702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:23.664810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:23.664923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:23.665025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:00:23.665794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:23.665839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:00:23.666068Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:23.666089Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:23.666107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:23.666114Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:23.666120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:00:23.666153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.667510Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2154] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T12:00:23.686141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:23.686222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.686293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:00:23.686341Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:23.686353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.687134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:23.687162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:00:23.687232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.687252Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:00:23.687257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:00:23.687262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:00:23.687659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.687680Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:23.687685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:00:23.687978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.687987Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.687992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:23.687999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:00:23.688545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:23.688889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:00:23.688927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:00:23.689118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:23.689141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 
72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:23.689151Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:23.689226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:00:23.689232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:23.689260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:23.689271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:00:23.690730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:23.690744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:23.690780Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:23.690784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:00:23.690817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.690822Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:00:23.690831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:23.690833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:23.690844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:23.690846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:23.690849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:00:23.690852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:23.690855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:00:23.690857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:00:23.690866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:23.690870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:00:23.690872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:00:23.691156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:00:23.691166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Vers 
... HEMESHARD NOTICE: IgniteOperation, opId: 129:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:23.832294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-07-08T12:00:23.832307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-07-08T12:00:23.840145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 129, response: Status: StatusAccepted TxId: 129 SchemeshardId: 72057594046678944 PathId: 5, at schemeshard: 72057594046678944 2025-07-08T12:00:23.840215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /MyRoot/SubDirBBBB 2025-07-08T12:00:23.840288Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:23.840295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:23.840347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-07-08T12:00:23.840369Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:23.840376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:487:2445], at schemeshard: 72057594046678944, txId: 129, path id: 1 2025-07-08T12:00:23.840382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:487:2445], at schemeshard: 72057594046678944, txId: 129, path id: 5 2025-07-08T12:00:23.840444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 129:0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.840452Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 129:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:23.840466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 129 ready parts: 1/1 2025-07-08T12:00:23.840495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 129 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:23.840713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 129 2025-07-08T12:00:23.840731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 129 2025-07-08T12:00:23.840735Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2025-07-08T12:00:23.840740Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-07-08T12:00:23.840747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing 
for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-07-08T12:00:23.840914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2025-07-08T12:00:23.840924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2025-07-08T12:00:23.840927Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2025-07-08T12:00:23.840931Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-07-08T12:00:23.840935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-07-08T12:00:23.840945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 129, ready parts: 0/1, is published: true 2025-07-08T12:00:23.841614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 129:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:129 msg type: 269090816 2025-07-08T12:00:23.841657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 129, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 129 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 129 at step: 5000005 2025-07-08T12:00:23.841856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-07-08T12:00:23.841873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-07-08T12:00:23.841915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:23.841934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:23.841941Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 129:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000005, at schemeshard: 72057594046678944 2025-07-08T12:00:23.841972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 129:0 128 -> 240 2025-07-08T12:00:23.842002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-07-08T12:00:23.842010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-07-08T12:00:23.843173Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:23.843188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:23.843279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard 
DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-07-08T12:00:23.843292Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:23.843297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:487:2445], at schemeshard: 72057594046678944, txId: 129, path id: 1 2025-07-08T12:00:23.843301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:487:2445], at schemeshard: 72057594046678944, txId: 129, path id: 5 FAKE_COORDINATOR: Erasing txId 129 2025-07-08T12:00:23.843348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 129:0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.843355Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 129:0 ProgressState 2025-07-08T12:00:23.843365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#129:0 progress is 1/1 2025-07-08T12:00:23.843369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-07-08T12:00:23.843373Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#129:0 progress is 1/1 2025-07-08T12:00:23.843376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-07-08T12:00:23.843380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2025-07-08T12:00:23.843384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-07-08T12:00:23.843388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 129:0 2025-07-08T12:00:23.843392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 129:0 2025-07-08T12:00:23.843404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-07-08T12:00:23.843409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2025-07-08T12:00:23.843423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-07-08T12:00:23.843426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 5], 3 2025-07-08T12:00:23.843535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 129 2025-07-08T12:00:23.843544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 129 2025-07-08T12:00:23.843548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2025-07-08T12:00:23.843553Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-07-08T12:00:23.843557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-07-08T12:00:23.843660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 
4 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2025-07-08T12:00:23.843668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2025-07-08T12:00:23.843674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2025-07-08T12:00:23.843678Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 3 2025-07-08T12:00:23.843681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-07-08T12:00:23.843689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2025-07-08T12:00:23.844366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-07-08T12:00:23.844386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 TestModificationResult got TxId: 129, wait until txId: 129 ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::MultipartUpload [GOOD] Test command err: 2025-07-08T12:00:23.992521Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 5F9518FA-455F-460F-949F-14979761526F, request# CreateMultipartUpload { Bucket: TEST Key: key } REQUEST: POST /TEST/key?uploads HTTP/1.1 HEADERS: Host: localhost:64844 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: AB4718F3-3E4A-4376-84F6-C195523E7426 amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key / uploads= 2025-07-08T12:00:24.003191Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 5F9518FA-455F-460F-949F-14979761526F, response# CreateMultipartUploadResult { Bucket: Key: TEST/key UploadId: 1 } 2025-07-08T12:00:24.005082Z node 1 :S3_WRAPPER NOTICE: Request: uuid# BBFD766B-F086-49A1-A1DA-26F2501D3D20, request# UploadPart { Bucket: TEST Key: key UploadId: 1 PartNumber: 1 } REQUEST: PUT /TEST/key?partNumber=1&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:64844 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: EE686233-BC3C-4B69-926A-7AB83ADF5268 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /TEST/key / partNumber=1&uploadId=1 / 4 2025-07-08T12:00:24.008222Z node 1 :S3_WRAPPER NOTICE: Response: uuid# BBFD766B-F086-49A1-A1DA-26F2501D3D20, response# UploadPartResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-07-08T12:00:24.008400Z node 1 :S3_WRAPPER NOTICE: Request: uuid# BC53491D-64AC-4ADE-B9EE-4EF5D409F044, request# CompleteMultipartUpload { Bucket: TEST Key: key UploadId: 1 MultipartUpload: { Parts: [841a2d689ad86bd1611447453c22c6fc] } } REQUEST: POST /TEST/key?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:64844 Accept: */* Connection: Upgrade, 
HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 3F90CC85-5597-4EAE-9BEA-03060978A3A5 amz-sdk-request: attempt=1 content-length: 235 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key / uploadId=1 2025-07-08T12:00:24.014875Z node 1 :S3_WRAPPER NOTICE: Response: uuid# BC53491D-64AC-4ADE-B9EE-4EF5D409F044, response# CompleteMultipartUploadResult { Bucket: Key: TEST/key ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-07-08T12:00:24.015021Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 8A952F21-056C-4F4F-9333-94EE2BB2321B, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:64844 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 71D60E3A-78A9-407C-9156-4C4A637BBD3D amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2025-07-08T12:00:24.025808Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 8A952F21-056C-4F4F-9333-94EE2BB2321B, response# GetObjectResult { } ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::UploadUnknownPart [GOOD] Test command err: 2025-07-08T12:00:23.428971Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 73C68183-39F6-42FE-B326-3D3A7420C094, request# UploadPart { Bucket: TEST Key: key UploadId: uploadId PartNumber: 1 } REQUEST: PUT /TEST/key?partNumber=1&uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:21907 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: DFA8F542-A332-48FF-8131-84A546A9ECB9 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /TEST/key / partNumber=1&uploadId=uploadId / 4 2025-07-08T12:00:23.433564Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 73C68183-39F6-42FE-B326-3D3A7420C094, response# ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::Decimal [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:128:2154] sender: [1:129:2058] recipient: [1:110:2142] 2025-07-08T12:00:23.098384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:00:23.098407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:23.098412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:00:23.098417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:00:23.098432Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:00:23.098436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:23.098444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:23.098461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:00:23.098539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:23.107926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T12:00:23.107942Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:23.110967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:23.110996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:23.111019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:00:23.112280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:23.112368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:23.112459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:23.112547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:00:23.113621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:23.113658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:00:23.113868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:23.113876Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:23.113901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:23.113907Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:23.113912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:00:23.113940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.115075Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2154] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T12:00:23.136123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:23.136208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 
1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.136282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:00:23.136336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:23.136358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.138784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:23.138813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:00:23.138867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.138887Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:00:23.138891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:00:23.138895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:00:23.139596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.139607Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:23.139610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:00:23.145211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.145235Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.145243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:23.145252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:00:23.145798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:23.149142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:00:23.149193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:00:23.149381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 
72057594046678944 2025-07-08T12:00:23.149414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:23.149422Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:23.149519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:00:23.149528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:23.149564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:23.149577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:00:23.150233Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:23.150242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:23.150287Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:23.150292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:00:23.150333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.150340Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:00:23.150350Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:23.150354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:23.150359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:23.150361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:23.150365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:00:23.150370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:23.150374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:00:23.150378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:00:23.150389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:23.150395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:00:23.150399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:00:23.150830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 
1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:00:23.150850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Vers ... ATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000003 2025-07-08T12:00:23.495731Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:23.495750Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 8589936750 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:23.495757Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateExternalTable TPropose, operationId: 101:0 HandleReply TEvOperationPlan: step# 5000003 2025-07-08T12:00:23.495777Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-07-08T12:00:23.495798Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:23.495806Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T12:00:23.495812Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-07-08T12:00:23.496013Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-07-08T12:00:23.496148Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-07-08T12:00:23.496348Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:23.496354Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:23.496373Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T12:00:23.496390Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T12:00:23.496397Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-07-08T12:00:23.496411Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:23.496415Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:207:2209], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-07-08T12:00:23.496419Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:207:2209], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-07-08T12:00:23.496423Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:207:2209], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-07-08T12:00:23.496426Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:207:2209], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-07-08T12:00:23.496434Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-07-08T12:00:23.496440Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-07-08T12:00:23.496450Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-07-08T12:00:23.496453Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-07-08T12:00:23.496458Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-07-08T12:00:23.496461Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-07-08T12:00:23.496479Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-07-08T12:00:23.496484Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-07-08T12:00:23.496488Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-07-08T12:00:23.496492Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-07-08T12:00:23.496501Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-07-08T12:00:23.496505Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-07-08T12:00:23.496509Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 3, subscribers: 0 2025-07-08T12:00:23.496513Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-07-08T12:00:23.496517Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-07-08T12:00:23.496520Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-07-08T12:00:23.496752Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-07-08T12:00:23.496764Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-07-08T12:00:23.496769Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2025-07-08T12:00:23.496775Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-07-08T12:00:23.496779Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T12:00:23.496998Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-07-08T12:00:23.497011Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-07-08T12:00:23.497014Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-07-08T12:00:23.497018Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-07-08T12:00:23.497022Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T12:00:23.497135Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-07-08T12:00:23.497145Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-07-08T12:00:23.497148Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-07-08T12:00:23.497152Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-07-08T12:00:23.497155Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-07-08T12:00:23.497164Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-07-08T12:00:23.497410Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-07-08T12:00:23.497672Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-07-08T12:00:23.497691Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-07-08T12:00:23.497735Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-07-08T12:00:23.497742Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-07-08T12:00:23.497803Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-07-08T12:00:23.497819Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-07-08T12:00:23.497824Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:334:2325] TestWaitNotification: OK eventTxId 101 2025-07-08T12:00:23.497901Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:23.497930Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 50us result status StatusSuccess 2025-07-08T12:00:23.498017Z 
node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Decimal(35,9)" TypeId: 4865 Id: 1 NotNull: false TypeInfo { DecimalPrecision: 35 DecimalScale: 9 } } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardDecimalTypesInTables::CreateWithWrongParameters [GOOD] >> TSchemeShardDecimalTypesInTables::AlterWithWrongParameters >> DataShardVolatile::DistributedUpsertRestartAfterPlan [GOOD] >> DataShardVolatile::CompactedVolatileChangesCommit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::RestartAndReadOnlyConsecutive [GOOD] Test command err: RandomSeed# 11615265857366576777 >> TSchemeShardTest::CreateBlockStoreVolume [GOOD] >> TSchemeShardTest::CreateBlockStoreVolumeWithVolumeChannelsProfiles >> TS3WrapperTests::HeadObject >> TS3WrapperTests::HeadObject [GOOD] >> ViewerTopicDataTests::TopicDataTest [GOOD] >> TSchemeShardDecimalTypesInTables::AlterWithWrongParameters [GOOD] >> TSchemeShardInfoTypesTest::EmptyFamilies [GOOD] >> TSchemeShardInfoTypesTest::LostId [GOOD] >> TSchemeShardInfoTypesTest::DeduplicationOrder [GOOD] >> TSchemeShardInfoTypesTest::MultipleDeduplications [GOOD] >> TSchemeShardInfoTypesTest::IndexBuildInfoAddParent [GOOD] >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-false |67.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TSchemeShardTest::CreateBlockStoreVolumeWithVolumeChannelsProfiles [GOOD] >> TSchemeShardTest::CreateBlockStoreVolumeWithNonReplicatedPartitions >> TS3WrapperTests::CopyPartUpload >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-false [GOOD] >> TSchemeShardTest::CreateBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::CreateAlterBlockStoreVolumeWithInvalidPoolKinds >> TS3WrapperTests::AbortUnknownUpload >> DistributedEraseTests::DistributedEraseTxShouldFailOnVariousErrors >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MilliSeconds >> TS3WrapperTests::CopyPartUpload [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldNotErase ------- [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/core/viewer/ut/unittest >> ViewerTopicDataTests::TopicDataTest [GOOD] Test command err: Build = 0.2463229299 Merge = 0.9940554859 Destroy = 0.04841845366 Data has built Merge = 0.05351797714 Data has merged 2025-07-08T12:00:09.054853Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:09.055125Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:09.055266Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:09.055597Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:09.056703Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-07-08T12:00:09.176590Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:09.305934Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-07-08T12:00:09.345085Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-07-08T12:00:09.535649Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 17100, node 1 TClient is connected to server localhost:15662 2025-07-08T12:00:09.666585Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:09.666603Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:09.666607Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:09.666697Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration Request timer = 10.50147547 BASE_PERF = 1.560420556 test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-07-08T12:00:22.141199Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:22.155403Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17194, node 6 2025-07-08T12:00:22.184649Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:22.184661Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:22.184663Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:22.184702Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8666 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-07-08T12:00:22.201199Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:22.201230Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:22.205302Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T12:00:22.221347Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:22.225147Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:22.231359Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T12:00:22.232120Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T12:00:22.576805Z node 6 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-07-08T12:00:22.576821Z node 6 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-07-08T12:00:22.629491Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:00:22.701140Z node 6 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-07-08T12:00:22.701154Z node 6 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-07-08T12:00:22.765453Z node 6 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-07-08T12:00:22.765465Z node 6 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000fdd/r3tmp/tmpENk3MI/pdisk_1.dat 2025-07-08T12:00:23.268985Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:23.276271Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11707, node 7 2025-07-08T12:00:23.296974Z node 7 :NET_CLASSIFIER 
WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:23.296987Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:23.296989Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:23.297034Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23753 PQClient connected to localhost:11707 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:23.353126Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:23.353149Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:23.353427Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:23.354336Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:00:23.361038Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:23.380102Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-07-08T12:00:23.381154Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:23.440910Z :DEBUG: [] MessageGroupId [producer1] SessionId [] Write session: try to update token 2025-07-08T12:00:23.441124Z :INFO: [] MessageGroupId [producer1] SessionId [] Write session: Do CDS request 2025-07-08T12:00:23.441130Z :INFO: [] MessageGroupId [producer1] SessionId [] Start write session. 
Will connect to endpoint: localhost:11707 2025-07-08T12:00:23.447198Z :DEBUG: [] MessageGroupId [producer1] SessionId [] Write session: send init request: init_request { topic: "/Root/topic1" message_group_id: "producer1" } 2025-07-08T12:00:23.452473Z :INFO: [] MessageGroupId [producer1] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1751976023452 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-07-08T12:00:23.452503Z :INFO: [] MessageGroupId [producer1] SessionId [] Write session established. Init response: session_id: "producer1|830098e9-1c87e34-44cc06d8-702637e5_0" topic: "topic1" 2025-07-08T12:00:23.454750Z :DEBUG: [] MessageGroupId [producer1] SessionId [producer1|830098e9-1c87e34-44cc06d8-702637e5_0] Write 1 messages with Id from 1 to 1 2025-07-08T12:00:23.454792Z :DEBUG: [] MessageGroupId [producer1] SessionId [producer1|830098e9-1c87e34-44cc06d8-702637e5_0] Write 1 messages with Id from 2 to 2 2025-07-08T12:00:23.454802Z :DEBUG: [] MessageGroupId [producer1] SessionId [producer1|830098e9-1c87e34-44cc06d8-702637e5_0] Write 1 messages with Id from 3 to 3 2025-07-08T12:00:23.454807Z :DEBUG: [] MessageGroupId [producer1] SessionId [producer1|830098e9-1c87e34-44cc06d8-702637e5_0] Write 1 messages with Id from 4 to 4 2025-07-08T12:00:23.454812Z :DEBUG: [] MessageGroupId [producer1] SessionId [producer1|830098e9-1c87e34-44cc06d8-702637e5_0] Write 1 messages with Id from 5 to 5 2025-07-08T12:00:23.454816Z :DEBUG: [] MessageGroupId [producer1] SessionId [producer1|830098e9-1c87e34-44cc06d8-702637e5_0] Write 1 messages with Id from 6 to 6 2025-07-08T12:00:23.454821Z :DEBUG: [] MessageGroupId [producer1] SessionId [producer1|830098e9-1c87e34-44cc06d8-702637e5_0] Write 1 mess ... 
ts: 46 already_written: false write_statistics { persist_duration_ms: 3 } 2025-07-08T12:00:23.941059Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session: acknoledged message 7 2025-07-08T12:00:23.950069Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session: try to update token 2025-07-08T12:00:23.950085Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Send 2 message(s) (11 left), first sequence number is 8 2025-07-08T12:00:23.950846Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session: try to update token 2025-07-08T12:00:23.950859Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Send 1 message(s) (10 left), first sequence number is 10 2025-07-08T12:00:23.953187Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session got write response: sequence_numbers: 8 sequence_numbers: 9 offsets: 47 offsets: 48 already_written: false already_written: false write_statistics { } 2025-07-08T12:00:23.953197Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session: acknoledged message 8 2025-07-08T12:00:23.953202Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session: acknoledged message 9 2025-07-08T12:00:23.957091Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session got write response: sequence_numbers: 10 offsets: 49 already_written: false write_statistics { } 2025-07-08T12:00:23.957102Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session: acknoledged message 10 2025-07-08T12:00:23.970897Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session: try to update token 2025-07-08T12:00:23.970916Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Send 3 message(s) (7 left), first sequence number is 11 2025-07-08T12:00:23.973160Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session: try to update token 2025-07-08T12:00:23.973171Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Send 1 message(s) (6 left), first sequence number is 14 2025-07-08T12:00:23.978138Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session: try to update token 2025-07-08T12:00:23.978152Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Send 1 message(s) (5 left), first sequence number is 15 2025-07-08T12:00:23.979683Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session: try to update token 2025-07-08T12:00:23.979695Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Send 1 message(s) (4 left), first sequence number is 16 2025-07-08T12:00:23.981048Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session got write response: sequence_numbers: 11 sequence_numbers: 12 sequence_numbers: 13 offsets: 50 
offsets: 51 offsets: 52 already_written: false already_written: false already_written: false write_statistics { } 2025-07-08T12:00:23.981066Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session: acknoledged message 11 2025-07-08T12:00:23.981072Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session: acknoledged message 12 2025-07-08T12:00:23.981075Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session: acknoledged message 13 2025-07-08T12:00:23.985398Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session: try to update token 2025-07-08T12:00:23.985408Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Send 2 message(s) (2 left), first sequence number is 17 2025-07-08T12:00:23.991153Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session: try to update token 2025-07-08T12:00:23.991166Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Send 1 message(s) (1 left), first sequence number is 19 2025-07-08T12:00:23.992304Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session got write response: sequence_numbers: 14 offsets: 53 already_written: false write_statistics { } 2025-07-08T12:00:23.992313Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session: acknoledged message 14 2025-07-08T12:00:23.992343Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session got write response: sequence_numbers: 15 offsets: 54 already_written: false write_statistics { } 2025-07-08T12:00:23.992345Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session: acknoledged message 15 2025-07-08T12:00:23.994130Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session: try to update token 2025-07-08T12:00:23.994140Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Send 1 message(s) (0 left), first sequence number is 20 2025-07-08T12:00:23.994431Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session got write response: sequence_numbers: 16 offsets: 55 already_written: false write_statistics { } 2025-07-08T12:00:23.994438Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session: acknoledged message 16 2025-07-08T12:00:24.005874Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session got write response: sequence_numbers: 17 sequence_numbers: 18 offsets: 56 offsets: 57 already_written: false already_written: false write_statistics { persist_duration_ms: 4 } 2025-07-08T12:00:24.005891Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session: acknoledged message 17 2025-07-08T12:00:24.005898Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session: acknoledged message 18 2025-07-08T12:00:24.007768Z :DEBUG: [] MessageGroupId [producer3] 
SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session got write response: sequence_numbers: 19 offsets: 58 already_written: false write_statistics { persist_duration_ms: 1 queued_in_partition_duration_ms: 4 } 2025-07-08T12:00:24.007779Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session: acknoledged message 19 2025-07-08T12:00:24.007821Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session got write response: sequence_numbers: 20 offsets: 59 already_written: false write_statistics { persist_duration_ms: 1 queued_in_partition_duration_ms: 4 } 2025-07-08T12:00:24.007825Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session: acknoledged message 20 2025-07-08T12:00:24.033013Z :INFO: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session will now close 2025-07-08T12:00:24.033038Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session: aborting 2025-07-08T12:00:24.033245Z :INFO: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session: gracefully shut down, all writes complete 2025-07-08T12:00:24.033260Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|d1368d57-948d93f7-2fc19a69-dd5f45f0_0] Write session: destroy 2025-07-08T12:00:24.249422Z node 7 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 2025-07-08T12:00:24.339131Z :DEBUG: [] MessageGroupId [producer4] SessionId [] Write session: try to update token 2025-07-08T12:00:24.339287Z :INFO: [] MessageGroupId [producer4] SessionId [] Write session: Do CDS request 2025-07-08T12:00:24.339293Z :INFO: [] MessageGroupId [producer4] SessionId [] Start write session. Will connect to endpoint: localhost:11707 2025-07-08T12:00:24.340299Z :DEBUG: [] MessageGroupId [producer4] SessionId [] Write session: send init request: init_request { topic: "/Root/topic1" message_group_id: "producer4" } 2025-07-08T12:00:24.343610Z :INFO: [] MessageGroupId [producer4] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1751976024343 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-07-08T12:00:24.343636Z :INFO: [] MessageGroupId [producer4] SessionId [] Write session established. Init response: session_id: "producer4|f838dca2-2b873b69-d855e8e0-7dbd7bae_0" topic: "topic1" 2025-07-08T12:00:24.344109Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|f838dca2-2b873b69-d855e8e0-7dbd7bae_0] Write 1 messages with Id from 1 to 1 2025-07-08T12:00:24.344144Z :INFO: [] MessageGroupId [producer4] SessionId [producer4|f838dca2-2b873b69-d855e8e0-7dbd7bae_0] Write session: close. 
Timeout = 18446744073709551 ms 2025-07-08T12:00:24.374669Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|f838dca2-2b873b69-d855e8e0-7dbd7bae_0] Write session: try to update token 2025-07-08T12:00:24.374689Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|f838dca2-2b873b69-d855e8e0-7dbd7bae_0] Send 1 message(s) (0 left), first sequence number is 1 2025-07-08T12:00:24.382641Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|f838dca2-2b873b69-d855e8e0-7dbd7bae_0] Write session got write response: sequence_numbers: 1 offsets: 60 already_written: false write_statistics { persist_duration_ms: 5 } 2025-07-08T12:00:24.382658Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|f838dca2-2b873b69-d855e8e0-7dbd7bae_0] Write session: acknoledged message 1 2025-07-08T12:00:24.444977Z :INFO: [] MessageGroupId [producer4] SessionId [producer4|f838dca2-2b873b69-d855e8e0-7dbd7bae_0] Write session will now close 2025-07-08T12:00:24.444999Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|f838dca2-2b873b69-d855e8e0-7dbd7bae_0] Write session: aborting 2025-07-08T12:00:24.445211Z :INFO: [] MessageGroupId [producer4] SessionId [producer4|f838dca2-2b873b69-d855e8e0-7dbd7bae_0] Write session: gracefully shut down, all writes complete 2025-07-08T12:00:24.445232Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|f838dca2-2b873b69-d855e8e0-7dbd7bae_0] Write session: destroy Size: 4194320 Got response:400: PathErrorUnknown Got response:400: No such partition in topic 2025-07-08T12:00:24.553761Z node 7 :PERSQUEUE ERROR: [PQ: 72075186224037889, Partition: 0, State: StateIdle] reading from too big offset - topic topic1 partition 0 client $without_consumer EndOffset 61 offset 10000 Got response:400: Bad offset ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadObject [GOOD] Test command err: 2025-07-08T12:00:24.902531Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 63DC44C7-7B16-4552-ABA7-A878B763C52A, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:1971 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 909DC4ED-83A7-4FE0-B222-6B5333E955D3 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2025-07-08T12:00:24.903812Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 63DC44C7-7B16-4552-ABA7-A878B763C52A, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-07-08T12:00:24.909072Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 448C4C53-A6AB-49F8-A400-29AFBF0B34C1, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:1971 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B3851E5D-D153-42BE-8687-28BA453604C5 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2025-07-08T12:00:24.909754Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 448C4C53-A6AB-49F8-A400-29AFBF0B34C1, response# HeadObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc ContentLength: 4 } >> TS3WrapperTests::AbortUnknownUpload [GOOD] >> 
TxUsage::WriteToTopic_Demo_19_RestartNo [GOOD] >> TSchemeShardTest::CreateAlterBlockStoreVolumeWithInvalidPoolKinds [GOOD] >> TSchemeShardTest::CreateDropKesus |67.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |67.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |67.1%| [TA] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTest::CreateDropKesus [GOOD] >> TSchemeShardTest::CreateAlterKesus |67.1%| [TA] {RESULT} $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log} |67.1%| [TA] {RESULT} $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |67.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write >> TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnUsage [GOOD] >> Viewer::SimpleFeatureFlags |67.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CopyPartUpload [GOOD] Test command err: 2025-07-08T12:00:25.351232Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 72898E13-299F-4407-A422-E9AB2EB55775, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:14696 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2F4ABBB0-C78D-42E5-B9DE-BFEE85840B06 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2025-07-08T12:00:25.366422Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 72898E13-299F-4407-A422-E9AB2EB55775, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-07-08T12:00:25.366569Z node 1 :S3_WRAPPER NOTICE: Request: uuid# C5A47D10-C137-4942-A826-AD2938ACF1BB, request# CreateMultipartUpload { Bucket: TEST Key: key1 } REQUEST: POST /TEST/key1?uploads HTTP/1.1 HEADERS: Host: localhost:14696 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9040ECC3-5B73-4FF3-B76E-C2FD5BF65FB5 amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key1 / uploads= 2025-07-08T12:00:25.370334Z node 1 :S3_WRAPPER NOTICE: Response: uuid# C5A47D10-C137-4942-A826-AD2938ACF1BB, response# CreateMultipartUploadResult { Bucket: Key: TEST/key1 UploadId: 1 } 2025-07-08T12:00:25.370428Z node 1 :S3_WRAPPER NOTICE: Request: uuid# B3ACF864-2F74-4DFE-8278-ED1E482447B4, request# UploadPartCopy { Bucket: TEST Key: key1 UploadId: 1 PartNumber: 1 } REQUEST: PUT /TEST/key1?partNumber=1&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:14696 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5FB9CFEF-FFFA-4E8D-9C57-EF222D7DE66E amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 x-amz-api-version: 
2006-03-01 x-amz-copy-source: /TEST/key x-amz-copy-source-range: bytes=1-2 S3_MOCK::HttpServeWrite: /TEST/key1 / partNumber=1&uploadId=1 / 0 2025-07-08T12:00:25.381597Z node 1 :S3_WRAPPER NOTICE: Response: uuid# B3ACF864-2F74-4DFE-8278-ED1E482447B4, response# UploadPartCopyResult { } 2025-07-08T12:00:25.381745Z node 1 :S3_WRAPPER NOTICE: Request: uuid# BF49A930-4959-4F4A-93EE-4ADAAB4EF0CD, request# CompleteMultipartUpload { Bucket: TEST Key: key1 UploadId: 1 MultipartUpload: { Parts: [afc7e8a98f75755e513d9d5ead888e1d] } } REQUEST: POST /TEST/key1?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:14696 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E8BA49C1-DA1B-456B-A898-FC55A554B976 amz-sdk-request: attempt=1 content-length: 235 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key1 / uploadId=1 2025-07-08T12:00:25.390962Z node 1 :S3_WRAPPER NOTICE: Response: uuid# BF49A930-4959-4F4A-93EE-4ADAAB4EF0CD, response# CompleteMultipartUploadResult { Bucket: Key: TEST/key1 ETag: afc7e8a98f75755e513d9d5ead888e1d } 2025-07-08T12:00:25.391072Z node 1 :S3_WRAPPER NOTICE: Request: uuid# EFD1D5ED-7C90-41D7-B5D4-1A338A205CAF, request# GetObject { Bucket: TEST Key: key1 Range: bytes=0-1 } REQUEST: GET /TEST/key1 HTTP/1.1 HEADERS: Host: localhost:14696 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 320EAD96-B02C-47DA-8230-1D7638096EE7 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-1 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key1 / 2 2025-07-08T12:00:25.391702Z node 1 :S3_WRAPPER NOTICE: Response: uuid# EFD1D5ED-7C90-41D7-B5D4-1A338A205CAF, response# GetObjectResult { } ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortUnknownUpload [GOOD] Test command err: 2025-07-08T12:00:25.500795Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 67366696-A8B3-4195-9AC5-85DF3E7F9FC7, request# AbortMultipartUpload { Bucket: TEST Key: key UploadId: uploadId } REQUEST: DELETE /TEST/key?uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:12036 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C9EB5821-0233-4C97-8345-32D2C89D8D3B amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 6 / /TEST/key / uploadId=uploadId 2025-07-08T12:00:25.502373Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 67366696-A8B3-4195-9AC5-85DF3E7F9FC7, response# >> TSchemeShardTest::CreateAlterKesus [GOOD] >> TSchemeShardTest::CreateDropSolomon >> DataShardVolatile::CompactedVolatileChangesCommit [GOOD] >> DataShardVolatile::CompactedVolatileChangesAbort >> LocalPartition::WithoutPartitionPartitionRelocation [GOOD] >> LocalPartition::DirectWriteWithoutDescribeResourcesPermission ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 
72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T12:00:02.026874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:00:02.026908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:02.026913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:00:02.026918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:00:02.026930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:00:02.026934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:02.026943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:02.026968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:00:02.027034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:02.044593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:00:02.044613Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:02.054237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:02.054287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:02.054313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:00:02.056369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:02.056429Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:02.056521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:02.056667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:00:02.057342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:02.057384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:00:02.057590Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:02.057599Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:02.057614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:02.057620Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2025-07-08T12:00:02.057626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:00:02.057660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:00:02.059825Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T12:00:02.078282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:02.078348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:02.078402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:00:02.078445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:02.078455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:02.079078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:02.079122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:00:02.079159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:02.079167Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:00:02.079172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:00:02.079176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:00:02.079480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:02.079491Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:02.079496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:00:02.079747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:02.079755Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:02.079761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:02.079767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:00:02.080380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 
72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:02.080715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:00:02.080749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:00:02.080901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:02.080919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:02.080925Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:02.080993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:00:02.080999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:02.081024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:02.081034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:00:02.085313Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:02.085326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:02.085369Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:02.085374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:00:02.085384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:02.085390Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:00:02.085402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:02.085408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:02.085413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:02.085415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:02.085420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 
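The records above show one schemeshard sub-operation moving through its numeric states: 2 (TCreateParts) -> 3 (NSubDomainState::TConfigureParts) -> 128 (NSubDomainState::TPropose) -> 240 (TDone). The short sketch below replays that progression for orientation only; the state numbers and phase names are taken from the log lines, while the program structure itself is an illustrative assumption and not the actual schemeshard source.

// Illustrative only: toy replay of the "Change state for txid 1:0 2 -> 3", "3 -> 128",
// "128 -> 240" progression printed in the records above. Not the real schemeshard code.
#include <cstdio>
#include <map>
#include <string>
#include <utility>

int main() {
    // state -> {phase that runs in that state, next state}; 0 marks completion
    const std::map<int, std::pair<std::string, int>> transitions = {
        {2,   {"TCreateParts",                     3}},   // no shards to create, do next state
        {3,   {"NSubDomainState::TConfigureParts", 128}}, // parts configured, ready to propose
        {128, {"NSubDomainState::TPropose",        240}}, // coordinator planned the step
        {240, {"TDone",                            0}},   // progress 1/1, notify waiters
    };

    for (int state = 2; state != 0;) {
        const auto& [phase, next] = transitions.at(state);
        if (next != 0) {
            std::printf("%s ProgressState: change state for txid 1:0 %d -> %d\n",
                        phase.c_str(), state, next);
        } else {
            std::printf("%s ProgressState: operation 1:0 is done\n", phase.c_str());
        }
        state = next;
    }
    return 0;
}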
2025-07-08T12:00:02.085425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:02.085429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:00:02.085433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:00:02.085445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:02.085451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:00:02.085455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:00:02.085855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:00:02.085866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:25.350326Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 2025-07-08T12:00:25.351794Z node 11 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:25.351805Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:25.351847Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-07-08T12:00:25.351882Z node 11 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:25.351887Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [11:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-07-08T12:00:25.351893Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [11:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-07-08T12:00:25.351968Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-07-08T12:00:25.351974Z node 11 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 2025-07-08T12:00:25.352220Z node 11 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-07-08T12:00:25.352233Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-07-08T12:00:25.352237Z node 11 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-07-08T12:00:25.352242Z node 11 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at 
schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-07-08T12:00:25.352248Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:25.352373Z node 11 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-07-08T12:00:25.352381Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-07-08T12:00:25.352385Z node 11 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-07-08T12:00:25.352389Z node 11 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-07-08T12:00:25.352392Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-07-08T12:00:25.352404Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 101 2025-07-08T12:00:25.352494Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 273 } } 2025-07-08T12:00:25.352500Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-07-08T12:00:25.352515Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 273 } } 2025-07-08T12:00:25.352527Z node 11 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 273 } } 2025-07-08T12:00:25.352672Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 47244642551 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-07-08T12:00:25.352678Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-07-08T12:00:25.352690Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 308 RawX2: 47244642551 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-07-08T12:00:25.352695Z node 11 :FLAT_TX_SCHEMESHARD INFO: 
NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-07-08T12:00:25.352703Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 308 RawX2: 47244642551 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-07-08T12:00:25.352712Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:25.352717Z node 11 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-07-08T12:00:25.352721Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-07-08T12:00:25.352727Z node 11 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-07-08T12:00:25.359393Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-07-08T12:00:25.359427Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-07-08T12:00:25.359460Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-07-08T12:00:25.359484Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-07-08T12:00:25.359560Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-07-08T12:00:25.359568Z node 11 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-07-08T12:00:25.359586Z node 11 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-07-08T12:00:25.359590Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-07-08T12:00:25.359595Z node 11 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-07-08T12:00:25.359598Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-07-08T12:00:25.359603Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-07-08T12:00:25.359622Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [11:334:2312] message: TxId: 101 2025-07-08T12:00:25.359630Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-07-08T12:00:25.359636Z node 11 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-07-08T12:00:25.359640Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-07-08T12:00:25.359664Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-07-08T12:00:25.360073Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-07-08T12:00:25.360082Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for 
txId 101: satisfy waiter [11:335:2313] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-07-08T12:00:25.360803Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table1" Columns { Name: "added" Type: "pgint4" } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:25.360842Z node 11 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/Table1, pathId: , opId: 102:0, at schemeshard: 72057594046678944 2025-07-08T12:00:25.360924Z node 11 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Type 'pgint4' specified for column 'added', but support for pg types is disabled (EnableTablePgTypes feature flag is off), at schemeshard: 72057594046678944 2025-07-08T12:00:25.361465Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Type \'pgint4\' specified for column \'added\', but support for pg types is disabled (EnableTablePgTypes feature flag is off)" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:25.361495Z node 11 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Type 'pgint4' specified for column 'added', but support for pg types is disabled (EnableTablePgTypes feature flag is off), operation: ALTER TABLE, path: /MyRoot/Table1 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-07-08T12:00:25.361544Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-07-08T12:00:25.361550Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-07-08T12:00:25.361607Z node 11 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-07-08T12:00:25.361623Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-07-08T12:00:25.361627Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [11:385:2356] TestWaitNotification: OK eventTxId 102 >> DistributedEraseTests::ConditionalEraseRowsShouldErase >> KqpScanArrowInChanels::AllTypesColumns |67.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTest::CreateDropSolomon [GOOD] >> TSchemeShardTest::CreateAlterDropSolomon >> EraseRowsTests::ConditionalEraseRowsShouldNotErase [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MilliSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds >> Viewer::SimpleFeatureFlags [GOOD] >> DistributedEraseTests::DistributedEraseTxShouldFailOnVariousErrors [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldErase >> TSchemeShardTest::CreateAlterDropSolomon [GOOD] |67.1%| [TA] $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> FolderServiceTest::TFolderService >> FolderServiceTest::TFolderServiceTransitional >> FolderServiceTest::TFolderServiceAdapter |67.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::PassRequestId >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge >> TSchemeShardTopicSplitMergeTest::MargeInactivePartitions >> DataShardVolatile::CompactedVolatileChangesAbort [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::SimpleFeatureFlags [GOOD] Test command err: BASE_PERF = 1.845643653 Build = 5.877410886 Merge = 7.249583804 Destroy = 2.613089949 2025-07-08T12:00:18.117877Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 64442, node 1 TClient is connected to server localhost:4099 json result: {"TotalGroups":"1","FoundGroups":"1","StorageGroups":[{"PoolName":"static","Kind":"","MediaType":"","Erasure":"none","Degraded":"1","Usage":"0.1","Used":"10","Limit":"100","Read":"0","Write":"0","GroupID":0,"ErasureSpecies":"none","VDisks":[{"VDiskId":{"GroupID":0,"GroupGeneration":1,"VDisk":0},"PDisk":{"PDiskId":0,"NodeId":1},"NodeId":1,"VDiskState":"OK","DiskSpace":"Green","AllocatedSize":"10","AvailableSize":"90","Overall":"Green"}],"DiskSpace":"Green","GroupGeneration":1,"VDiskNodeIds":[1],"Overall":"Red"}]} 2025-07-08T12:00:19.283581Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 18434, node 2 TClient is connected to server localhost:22867 json result: {"TotalGroups":"1","FoundGroups":"1","StorageGroups":[{"PoolName":"static","Kind":"","MediaType":"","Erasure":"none","Degraded":"1","Usage":"0.9","Used":"90","Limit":"100","Read":"0","Write":"0","GroupID":0,"ErasureSpecies":"none","VDisks":[{"VDiskId":{"GroupID":0,"GroupGeneration":1,"VDisk":0},"PDisk":{"PDiskId":0,"NodeId":2},"NodeId":2,"VDiskState":"OK","DiskSpace":"Red","AllocatedSize":"90","AvailableSize":"10","Overall":"Red"}],"DiskSpace":"Red","GroupGeneration":1,"VDiskNodeIds":[2],"Overall":"Red"}]} 2025-07-08T12:00:20.536665Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 2334, node 3 TClient is connected to server localhost:28002 json result: {"TotalGroups":"1","FoundGroups":"0"} 2025-07-08T12:00:21.884205Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 29584, node 4 TClient is connected to server localhost:15772 json result: 
{"TotalGroups":"1","FoundGroups":"1","StorageGroups":[{"PoolName":"static","Kind":"","MediaType":"","Erasure":"none","Degraded":"1","Usage":"0.1","Used":"10","Limit":"100","Read":"0","Write":"0","GroupID":0,"ErasureSpecies":"none","VDisks":[{"VDiskId":{"GroupID":0,"GroupGeneration":1,"VDisk":0},"PDisk":{"PDiskId":0,"NodeId":4},"NodeId":4,"VDiskState":"OK","DiskSpace":"Red","AllocatedSize":"10","AvailableSize":"90","Overall":"Red"}],"DiskSpace":"Red","GroupGeneration":1,"VDiskNodeIds":[4],"Overall":"Red"}]} 2025-07-08T12:00:23.090956Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 7924, node 5 TClient is connected to server localhost:21066 json result: {"TotalGroups":"1","FoundGroups":"0"} 2025-07-08T12:00:24.208222Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 26292, node 6 TClient is connected to server localhost:25367 json result: {"TotalGroups":"1","FoundGroups":"1","StorageGroups":[{"PoolName":"static","Kind":"","MediaType":"","Erasure":"none","Degraded":"1","Usage":"0.8","Used":"80","Limit":"100","Read":"0","Write":"0","GroupID":0,"ErasureSpecies":"none","VDisks":[{"VDiskId":{"GroupID":0,"GroupGeneration":1,"VDisk":0},"PDisk":{"PDiskId":0,"NodeId":6},"NodeId":6,"VDiskState":"OK","DiskSpace":"Green","AllocatedSize":"80","AvailableSize":"20","Overall":"Green"}],"DiskSpace":"Green","GroupGeneration":1,"VDiskNodeIds":[6],"Overall":"Red"}]} 2025-07-08T12:00:25.610538Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 10528, node 7 TClient is connected to server localhost:23389 json result: {"TotalGroups":"1","FoundGroups":"1","StorageGroups":[{"PoolName":"static","Kind":"","MediaType":"","Erasure":"none","Degraded":"1","Usage":"0.9","Used":"90","Limit":"100","Read":"0","Write":"0","GroupID":0,"ErasureSpecies":"none","VDisks":[{"VDiskId":{"GroupID":0,"GroupGeneration":1,"VDisk":0},"PDisk":{"PDiskId":0,"NodeId":7},"NodeId":7,"VDiskState":"OK","DiskSpace":"Green","AllocatedSize":"90","AvailableSize":"10","Overall":"Green"}],"DiskSpace":"Green","GroupGeneration":1,"VDiskNodeIds":[7],"Overall":"Red"}]} 2025-07-08T12:00:26.179495Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7524679736213278455:2064];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:26.181045Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-07-08T12:00:26.209206Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24694, node 8 2025-07-08T12:00:26.221182Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:26.221193Z node 8 :NET_CLASSIFIER WARN: will try to 
initialize from file: (empty maybe) 2025-07-08T12:00:26.221195Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:26.221245Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11356 2025-07-08T12:00:26.285202Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:26.285240Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:26.287679Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected >> TUserAccountServiceTest::Get >> KqpScanArrowInChanels::AllTypesColumns [GOOD] >> KqpScanArrowInChanels::SingleKey >> TUserAttrsTestWithReboots::InSubdomain [GOOD] >> TServiceAccountServiceTest::Get [GOOD] >> TAccessServiceTest::Authenticate |67.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TSchemeShardTopicSplitMergeTest::MargePartitions >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition >> DistributedEraseTests::ConditionalEraseRowsShouldErase [GOOD] >> DistributedEraseTests::ConditionalEraseRowsCheckLimits >> TAccessServiceTest::PassRequestId [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateAlterDropSolomon [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T12:00:03.704916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:00:03.704977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:03.704985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:00:03.704990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:00:03.705004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:00:03.705008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:03.705017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:03.705029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:00:03.705098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:03.724694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
2025-07-08T12:00:03.724717Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:03.728869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:03.728926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:03.728968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:00:03.730599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:03.730669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:03.730754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:03.730953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:00:03.731873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:03.731922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:00:03.732177Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:03.732190Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:03.732207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:03.732215Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:03.732222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:00:03.732247Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:00:03.735033Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T12:00:03.769794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:03.769868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:03.769923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:00:03.769969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:03.769980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:03.773240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:03.773272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: 
, status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:00:03.773316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:03.773326Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:00:03.773332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:00:03.773341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:00:03.773803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:03.773817Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:03.773823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:00:03.774225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:03.774235Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:03.774241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:03.774248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:00:03.774855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:03.775278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:00:03.775320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:00:03.775489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:03.775512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:03.775518Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:03.775581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:00:03.775589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:03.775617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
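Between states 128 and 240 the operation waits for a plan step: TOperation DoPropose sends the transaction to coordinator 72057594046316545, and the FAKE_COORDINATOR lines show the assigned step (5000001 here) coming back via TEvOperationPlan. A minimal sketch of that handshake follows, assuming a toy coordinator rather than the real coordinator actor or the FAKE_COORDINATOR test helper.

// Illustrative only: toy version of the propose/plan handshake seen above.
// The step value 5000001 is the first planned step in this log; everything
// else is an assumption for illustration.
#include <cstdint>
#include <cstdio>
#include <utility>
#include <vector>

struct ToyCoordinator {
    std::uint64_t nextStep = 5000001;  // first planned step observed in the log
    std::vector<std::pair<std::uint64_t, std::uint64_t>> plan;  // (step, txId)

    std::uint64_t Propose(std::uint64_t txId) {
        const std::uint64_t step = nextStep++;
        plan.emplace_back(step, txId);  // the planned step is then delivered as TEvOperationPlan
        return step;
    }
};

int main() {
    ToyCoordinator coordinator;
    const std::uint64_t step = coordinator.Propose(/*txId=*/1);
    std::printf("FAKE_COORDINATOR: Add transaction: 1 at step: %llu\n",
                static_cast<unsigned long long>(step));
    return 0;
}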
2025-07-08T12:00:03.775630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:00:03.776037Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:03.776115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:03.776171Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:03.776177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:00:03.776190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:03.776199Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:00:03.776209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:03.776214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:03.776218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:03.776222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:03.776226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:00:03.776231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:03.776236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:00:03.776239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:00:03.776250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:03.776256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:00:03.776261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:00:03.776669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:00:03.776683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
HARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-07-08T12:00:27.210346Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-07-08T12:00:27.210350Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-07-08T12:00:27.210354Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-07-08T12:00:27.210358Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-07-08T12:00:27.210418Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-07-08T12:00:27.210426Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-07-08T12:00:27.210432Z node 16 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-07-08T12:00:27.210436Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-07-08T12:00:27.210440Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:27.210448Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-07-08T12:00:27.211017Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-07-08T12:00:27.211028Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-07-08T12:00:27.211032Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-07-08T12:00:27.211036Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-07-08T12:00:27.211077Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-07-08T12:00:27.211120Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-07-08T12:00:27.211171Z node 16 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-07-08T12:00:27.211209Z node 16 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-07-08T12:00:27.211230Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 
ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-07-08T12:00:27.211279Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409548 2025-07-08T12:00:27.211485Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:27.211519Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409546 2025-07-08T12:00:27.211653Z node 16 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-07-08T12:00:27.228037Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-07-08T12:00:27.228172Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-07-08T12:00:27.228921Z node 16 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-07-08T12:00:27.229004Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-07-08T12:00:27.229062Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 Forgetting tablet 72075186233409549 Forgetting tablet 72075186233409547 2025-07-08T12:00:27.233816Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T12:00:27.233837Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-07-08T12:00:27.233862Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:27.235047Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-07-08T12:00:27.235065Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-07-08T12:00:27.235213Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-07-08T12:00:27.235219Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-07-08T12:00:27.235231Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-07-08T12:00:27.235235Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-07-08T12:00:27.235653Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-07-08T12:00:27.235665Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-07-08T12:00:27.235716Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done 
PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-07-08T12:00:27.235771Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-07-08T12:00:27.235778Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-07-08T12:00:27.235849Z node 16 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-07-08T12:00:27.235870Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-07-08T12:00:27.235876Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [16:532:2486] TestWaitNotification: OK eventTxId 103 2025-07-08T12:00:27.235953Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:27.235989Z node 16 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Solomon" took 53us result status StatusPathDoesNotExist 2025-07-08T12:00:27.236025Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted 2025-07-08T12:00:27.236088Z node 16 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-07-08T12:00:27.236099Z node 16 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2025-07-08T12:00:27.236107Z node 16 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2025-07-08T12:00:27.236116Z node 16 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 2025-07-08T12:00:27.236180Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:27.236207Z node 16 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 29us result status StatusSuccess 2025-07-08T12:00:27.236290Z node 16 :SCHEMESHARD_DESCRIBE 
DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TAccessServiceTest::Authenticate [GOOD] >> TUserAccountServiceTest::Get [GOOD] >> FolderServiceTest::TFolderServiceAdapter [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldErase [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::PassRequestId [GOOD] Test command err: 2025-07-08T12:00:27.817190Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679742156815963:2061];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:27.819423Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0011f9/r3tmp/tmpVuPKWT/pdisk_1.dat 2025-07-08T12:00:27.873076Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:15423 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-07-08T12:00:27.953212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:27.953239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:27.953969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:27.955370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:00:27.957319Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:00:28.025325Z node 1 :GRPC_CLIENT DEBUG: [75dff950bf0]{trololo} Connect to grpc://localhost:14056 2025-07-08T12:00:28.025799Z node 1 :GRPC_CLIENT DEBUG: [75dff950bf0]{trololo} Request AuthenticateRequest { iam_token: "**** (717F937C)" } 2025-07-08T12:00:28.037199Z node 1 :GRPC_CLIENT DEBUG: [75dff950bf0]{trololo} Response AuthenticateResponse { subject { user_account { id: "1234" } } } >> TSchemeShardTopicSplitMergeTest::MargePartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions [GOOD] >> KqpScanArrowInChanels::SingleKey [GOOD] >> KqpScanArrowInChanels::JoinWithParams ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds [GOOD] Test command err: 2025-07-08T12:00:26.305776Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001190/r3tmp/tmpG2nUI0/pdisk_1.dat 2025-07-08T12:00:26.445604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:00:26.464673Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:26.499889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:26.499922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:26.511239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:26.603140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:26.623087Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:603:2519] 2025-07-08T12:00:26.623156Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:26.636377Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:26.636413Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:26.636558Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T12:00:26.636566Z node 1 
:TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T12:00:26.636574Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T12:00:26.636644Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:26.636663Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:26.636674Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:619:2519] in generation 1 2025-07-08T12:00:26.650444Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:26.654495Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T12:00:26.654561Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:26.654581Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:621:2529] 2025-07-08T12:00:26.654586Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:26.654591Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T12:00:26.654595Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:26.654760Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T12:00:26.654781Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T12:00:26.654797Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:26.654802Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:26.654810Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:26.654815Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:26.654909Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:599:2516], serverId# [1:610:2523], sessionId# [0:0:0] 2025-07-08T12:00:26.654941Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:26.654989Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T12:00:26.655003Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T12:00:26.655267Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:26.668180Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:26.668223Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T12:00:26.841539Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:640:2542], serverId# [1:642:2544], sessionId# [0:0:0] 2025-07-08T12:00:26.842312Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 
} 2025-07-08T12:00:26.842324Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:26.842422Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:26.842429Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-07-08T12:00:26.842437Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-07-08T12:00:26.842488Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-07-08T12:00:26.842515Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-07-08T12:00:26.842591Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:26.842602Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-07-08T12:00:26.842900Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-07-08T12:00:26.842990Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:26.843205Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-07-08T12:00:26.843211Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:26.843343Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-07-08T12:00:26.843352Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:26.843518Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:26.843524Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:26.843529Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-07-08T12:00:26.843543Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:361:2356], exec latency: 0 ms, propose latency: 0 ms 2025-07-08T12:00:26.843553Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-07-08T12:00:26.843561Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:26.845478Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:26.845744Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-07-08T12:00:26.845757Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-07-08T12:00:26.845895Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-07-08T12:00:26.920790Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715658. 
Ctx: { TraceId: 01jzmyh8pz0xx5ftpqryb6aa27, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTlmYjI2ZjktZmIyZTRhYmEtN2ViOTUxODAtMmU0NGNkODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:26.921968Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:685:2577], serverId# [1:686:2578], sessionId# [0:0:0] 2025-07-08T12:00:26.922042Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:26.947693Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:26.947745Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:26.948795Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:693:2584], serverId# [1:694:2585], sessionId# [0:0:0] 2025-07-08T12:00:26.949081Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-07-08T12:00:26.973193Z node 1 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-07-08T12:00:26.973223Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:26.973284Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-07-08T12:00:26.973292Z node 1 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-07-08T12:00:26.973375Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:26.973384Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:26.973392Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:26.973406Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:26.973425Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:693:2584], serverId# [1:694:2585], sessionId# [0:0:0] 2025-07-08T12:00:26.973655Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:26.973738Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:26.973784Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:26.973789Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:26.973795Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for WaitForStreamClearance 2025-07-08T12:00:26.973833Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:26.973839Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:26.973975Z node 1 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 2025 ... 
2025-07-08T12:00:27.911829Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:27.911869Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:27.911989Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T12:00:27.911996Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T12:00:27.912001Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T12:00:27.912044Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:27.912064Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:27.912075Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:620:2520] in generation 1 2025-07-08T12:00:27.924521Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:27.924548Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T12:00:27.924575Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:27.924587Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:622:2530] 2025-07-08T12:00:27.924592Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:27.924597Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T12:00:27.924601Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:27.924725Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T12:00:27.924746Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T12:00:27.924843Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:27.924849Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:27.924857Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:27.924862Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:27.924873Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:600:2517], serverId# [2:611:2524], sessionId# [0:0:0] 2025-07-08T12:00:27.924898Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:27.924968Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T12:00:27.924986Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T12:00:27.925283Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:27.937191Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:27.937233Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T12:00:28.101238Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:640:2542], 
serverId# [2:642:2544], sessionId# [0:0:0] 2025-07-08T12:00:28.101394Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-07-08T12:00:28.101403Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:28.101432Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:28.101439Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-07-08T12:00:28.101448Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-07-08T12:00:28.101512Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-07-08T12:00:28.101538Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-07-08T12:00:28.101776Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:28.101790Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-07-08T12:00:28.101879Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-07-08T12:00:28.101956Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:28.102314Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-07-08T12:00:28.102322Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:28.102445Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-07-08T12:00:28.102455Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:28.102617Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:28.102624Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:28.102630Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-07-08T12:00:28.102646Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:362:2357], exec latency: 0 ms, propose latency: 0 ms 2025-07-08T12:00:28.102654Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-07-08T12:00:28.102675Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:28.102834Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:28.103192Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-07-08T12:00:28.103200Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 
2025-07-08T12:00:28.103255Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-07-08T12:00:28.116601Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715658. Ctx: { TraceId: 01jzmyh9y88jx9mfpww89a1ady, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzQxMjNiMjEtZmM1ZDllYi0zNmZkMmVhYS0yMzgyOGUxZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:28.116746Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:685:2577], serverId# [2:686:2578], sessionId# [0:0:0] 2025-07-08T12:00:28.116792Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:28.137553Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:28.137600Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:28.138744Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:693:2584], serverId# [2:694:2585], sessionId# [0:0:0] 2025-07-08T12:00:28.138975Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-07-08T12:00:28.160471Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-07-08T12:00:28.160498Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:28.160558Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-07-08T12:00:28.160564Z node 2 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-07-08T12:00:28.160644Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:28.160651Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:28.160667Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:28.160677Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:28.160689Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:693:2584], serverId# [2:694:2585], sessionId# [0:0:0] 2025-07-08T12:00:28.160858Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:28.160925Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:28.160975Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:28.160981Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:28.160987Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for WaitForStreamClearance 2025-07-08T12:00:28.161025Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:28.161031Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:28.161158Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 2025-07-08T12:00:28.161215Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 
72075186224037888, TxId: 281474976715659, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-07-08T12:00:28.161241Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715659, PendingAcks: 0 2025-07-08T12:00:28.161246Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 0 2025-07-08T12:00:28.161309Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-07-08T12:00:28.161315Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715659, at: 72075186224037888 2025-07-08T12:00:28.161338Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:28.161343Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:28.161350Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for ReadTableScan 2025-07-08T12:00:28.161376Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:28.161383Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:28.161387Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> FolderServiceTest::TFolderService [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceAdapter [GOOD] Test command err: 2025-07-08T12:00:27.902091Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679743382909120:2224];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:27.932432Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001223/r3tmp/tmphzkwR9/pdisk_1.dat 2025-07-08T12:00:27.954847Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:13367 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-07-08T12:00:28.032001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:28.032036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:28.032749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:28.033235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:00:28.229251Z node 1 :GRPC_CLIENT DEBUG: [555e3f9515f0] Connect to grpc://localhost:25420 2025-07-08T12:00:28.229443Z node 1 :GRPC_CLIENT DEBUG: [555e3f9515f0] Request ListFoldersRequest { id: "i_am_exists" } 2025-07-08T12:00:28.232613Z node 1 :GRPC_CLIENT DEBUG: [555e3f9515f0] Response ListFoldersResponse { result { cloud_id: "cloud_from_old_service" } } 2025-07-08T12:00:28.233858Z node 1 :GRPC_CLIENT DEBUG: [555e3f940f70] Connect to grpc://localhost:20265 2025-07-08T12:00:28.233974Z node 1 :GRPC_CLIENT DEBUG: [555e3f940f70] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2025-07-08T12:00:28.241682Z node 1 :GRPC_CLIENT DEBUG: [555e3f940f70] Response ResolveFoldersResponse { resolved_folders { cloud_id: "cloud_from_new_service" } } 2025-07-08T12:00:28.242070Z node 1 :GRPC_CLIENT DEBUG: [555e3f940f70] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-07-08T12:00:28.242543Z node 1 :GRPC_CLIENT DEBUG: [555e3f940f70] Status 5 Not Found 2025-07-08T12:00:28.242687Z node 1 :GRPC_CLIENT DEBUG: [555e3f9515f0] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-07-08T12:00:28.243089Z node 1 :GRPC_CLIENT DEBUG: [555e3f9515f0] Status 5 Not Found >> FolderServiceTest::TFolderServiceTransitional [GOOD] >> TUserAttrsTestWithReboots::Reboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TUserAccountServiceTest::Get [GOOD] Test command err: 2025-07-08T12:00:28.092156Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679744960991369:2136];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:28.092331Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0011b9/r3tmp/tmpjQ2d9B/pdisk_1.dat 2025-07-08T12:00:28.184202Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:20932 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:28.225314Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:28.225346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:28.226175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:28.228903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:28.229085Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::Authenticate [GOOD] Test command err: 2025-07-08T12:00:28.079857Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679743645435089:2061];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:28.079884Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0011c2/r3tmp/tmpbdqVks/pdisk_1.dat 2025-07-08T12:00:28.135519Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:21125 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-07-08T12:00:28.165234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:28.167791Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-07-08T12:00:28.170583Z node 1 :GRPC_CLIENT DEBUG: [46683f950970] Connect to grpc://localhost:28164 2025-07-08T12:00:28.171003Z node 1 :GRPC_CLIENT DEBUG: [46683f950970] Request AuthenticateRequest { iam_token: "**** (047D44F1)" } 2025-07-08T12:00:28.178020Z node 1 :GRPC_CLIENT DEBUG: [46683f950970] Status 7 Permission Denied 2025-07-08T12:00:28.180023Z node 1 :GRPC_CLIENT DEBUG: [46683f950970] Request AuthenticateRequest { iam_token: "**** (342498C1)" } 2025-07-08T12:00:28.182640Z node 1 :GRPC_CLIENT DEBUG: [46683f950970] Response AuthenticateResponse { subject { user_account { id: "1234" } } } 2025-07-08T12:00:28.205397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:28.205423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TUserAttrsTestWithReboots::InSubdomain [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:127:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:132:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:137:2058] recipient: [1:111:2143] 2025-07-08T11:59:55.971866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:59:55.971890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:55.971896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:59:55.971901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:59:55.971915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:59:55.971919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:59:55.971927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:55.971944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:59:55.972055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:55.984829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T11:59:55.984852Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:177:2058] recipient: [1:15:2062] 2025-07-08T11:59:55.988074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:55.988114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:59:55.988139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:59:55.989424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:59:55.989541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:59:55.989643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:55.989692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:59:55.990079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:55.990110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:59:55.990333Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:55.990341Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:55.990359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:59:55.990369Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:55.990375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:59:55.990409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-07-08T11:59:55.991473Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T11:59:56.008001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:56.008081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain 
Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.008142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:59:56.008180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:59:56.008189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.008908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:56.008929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:59:56.008987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.009006Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:59:56.009011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:59:56.009015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:59:56.009312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.009319Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:56.009323Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:59:56.009546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.009553Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.009558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:56.009564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:59:56.010070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:59:56.010342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:59:56.010380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:59:56.010557Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:56.010576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:56.010582Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:56.010639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:59:56.010645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:56.010670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:59:56.010680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:59:56.010970Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:56.010977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:56.011018Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:56.011022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:59:56.011085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:56.011090Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:59:56.011103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:56.011107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:56.011112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:56.011114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:56.011118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:59:56.011123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TO ... 
Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2025-07-08T12:00:27.818822Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-07-08T12:00:27.818825Z node 71 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2025-07-08T12:00:27.818829Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2025-07-08T12:00:27.818859Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-07-08T12:00:27.818865Z node 71 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 2, subscribers: 0 2025-07-08T12:00:27.818869Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 2], 7 2025-07-08T12:00:27.818872Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-07-08T12:00:27.818978Z node 71 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T12:00:27.818989Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T12:00:27.818993Z node 71 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2025-07-08T12:00:27.818997Z node 71 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2025-07-08T12:00:27.819005Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-07-08T12:00:27.819133Z node 71 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T12:00:27.819145Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T12:00:27.819149Z node 71 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2025-07-08T12:00:27.819153Z node 71 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-07-08T12:00:27.819157Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-07-08T12:00:27.819166Z node 71 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2025-07-08T12:00:27.819606Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-07-08T12:00:27.819617Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 
2025-07-08T12:00:27.819621Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-07-08T12:00:27.819775Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-07-08T12:00:27.820792Z node 71 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-07-08T12:00:27.820875Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:27.820931Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-07-08T12:00:27.821010Z node 71 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-07-08T12:00:27.821288Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-07-08T12:00:27.821320Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 Forgetting tablet 72075186233409548 2025-07-08T12:00:27.821370Z node 71 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-07-08T12:00:27.821454Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-07-08T12:00:27.821474Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186233409547 2025-07-08T12:00:27.821651Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T12:00:27.821660Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-07-08T12:00:27.821683Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-07-08T12:00:27.821781Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-07-08T12:00:27.821815Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T12:00:27.821819Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-07-08T12:00:27.821827Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-07-08T12:00:27.822197Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-07-08T12:00:27.822207Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 
72057594046678944:1 tabletId 72075186233409546 2025-07-08T12:00:27.822269Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-07-08T12:00:27.822273Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-07-08T12:00:27.822528Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-07-08T12:00:27.822535Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-07-08T12:00:27.822581Z node 71 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-07-08T12:00:27.822597Z node 71 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409547 TestWaitNotification wait txId: 1004 2025-07-08T12:00:27.822665Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-07-08T12:00:27.822670Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-07-08T12:00:27.822721Z node 71 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-07-08T12:00:27.822736Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-07-08T12:00:27.822741Z node 71 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [71:545:2499] TestWaitNotification: OK eventTxId 1004 2025-07-08T12:00:27.822800Z node 71 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:27.822828Z node 71 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/USER_0" took 37us result status StatusPathDoesNotExist 2025-07-08T12:00:27.822863Z node 71 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirA/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/DirA\' (id: [OwnerId: 72057594046678944, LocalPathId: 2]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/DirA/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/DirA" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-07-08T12:00:27.822907Z node 71 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:27.822920Z node 71 
:SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 14us result status StatusSuccess 2025-07-08T12:00:27.822970Z node 71 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T12:00:28.524615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:00:28.524651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:28.524656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:00:28.524660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:00:28.524665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:00:28.524669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:28.524683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:28.524694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:00:28.524758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:28.539247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:00:28.539268Z node 1 :IMPORT WARN: Table 
profiles were not loaded 2025-07-08T12:00:28.542785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:28.542839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:28.542891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:00:28.544277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:28.544330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:28.544414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:28.544584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:00:28.545441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:28.545474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:00:28.545678Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:28.545688Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:28.545702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:28.545708Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:28.545714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:00:28.545737Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.546818Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T12:00:28.568913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:28.568984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.569037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:00:28.569072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:28.569082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.573454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:28.573491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, 
path: //MyRoot 2025-07-08T12:00:28.573543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.573554Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:00:28.573559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:00:28.573564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:00:28.574115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.574129Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:28.574134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:00:28.574491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.574502Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.574507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:28.574514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:00:28.575118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:28.575532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:00:28.575572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:00:28.575748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:28.575773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:28.575793Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:28.575868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:00:28.575876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:28.575902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:28.575913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:00:28.576372Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:28.576382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:28.576424Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:28.576430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:00:28.576442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.576449Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:00:28.576459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:28.576463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:28.576467Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:28.576470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:28.576474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:00:28.576479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:28.576483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:00:28.576487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:00:28.576498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:28.576503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:00:28.576507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:00:28.576888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:00:28.576907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Inactive ChildPartitionIds: 3 ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\177" } Status: Active ParentPartitionIds: 1 } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "\177" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active ParentPartitionIds: 1 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\177" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 KeyRange { FromBound: "\177" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:28.757605Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:764:2058] recipient: [1:103:2138] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:767:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:768:2058] recipient: [1:766:2675] Leader for TabletID 72057594046678944 
is [1:769:2676] sender: [1:770:2058] recipient: [1:766:2675] 2025-07-08T12:00:28.763337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:00:28.763357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:28.763362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:00:28.763367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:00:28.763372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:00:28.763376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:28.763385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:28.763397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:00:28.763441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:28.764711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:28.765017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:28.765050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:28.765068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:00:28.765072Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:28.765111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:28.765178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2025-07-08T12:00:28.765192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-07-08T12:00:28.765198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_1, child name: Topic1, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-07-08T12:00:28.765206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.765214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.765241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-07-08T12:00:28.765288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.765297Z node 1 
:FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-07-08T12:00:28.765317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.765335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.765348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2025-07-08T12:00:28.765353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-07-08T12:00:28.765357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-07-08T12:00:28.765360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-07-08T12:00:28.765364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-07-08T12:00:28.765374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.765383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.765410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2025-07-08T12:00:28.765441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T12:00:28.765482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.765493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.765550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.765558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.765589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.765597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.765606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.765625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.765634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.765668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.765696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.765704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.765710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: KMeansTreeCluster records: 0, at 
schemeshard: 72057594046678944 2025-07-08T12:00:28.765723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.765729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.765735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.767279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:00:28.767736Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:28.767750Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:28.767958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:28.767969Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:28.767976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:00:28.767995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_volatile/unittest >> DataShardVolatile::CompactedVolatileChangesAbort [GOOD] Test command err: 2025-07-08T11:59:31.885315Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00174c/r3tmp/tmpqynqbW/pdisk_1.dat 2025-07-08T11:59:32.066028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T11:59:32.084533Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:32.125305Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-07-08T11:59:32.125600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:32.125619Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:32.125650Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-07-08T11:59:32.137414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:59:32.330258Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Handle TEvProposeTransaction 2025-07-08T11:59:32.330283Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] TxId# 281474976715657 ProcessProposeTransaction 2025-07-08T11:59:32.330322Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:582:2502] 2025-07-08T11:59:32.367679Z node 1 :TX_PROXY DEBUG: Actor# [1:582:2502] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: 
"" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-07-08T11:59:32.367723Z node 1 :TX_PROXY DEBUG: Actor# [1:582:2502] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-07-08T11:59:32.367929Z node 1 :TX_PROXY DEBUG: Actor# [1:582:2502] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-07-08T11:59:32.368592Z node 1 :TX_PROXY DEBUG: Actor# [1:582:2502] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-07-08T11:59:32.368723Z node 1 :TX_PROXY DEBUG: Actor# [1:582:2502] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-07-08T11:59:32.368763Z node 1 :TX_PROXY DEBUG: Actor# [1:582:2502] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 1000 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-07-08T11:59:32.368778Z node 1 :TX_PROXY DEBUG: Actor# [1:582:2502] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-07-08T11:59:32.369189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:59:32.369354Z node 1 :TX_PROXY DEBUG: Actor# [1:582:2502] txid# 281474976715657 HANDLE EvClientConnected 2025-07-08T11:59:32.373166Z node 1 :TX_PROXY DEBUG: Actor# [1:582:2502] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-07-08T11:59:32.373186Z node 1 :TX_PROXY DEBUG: Actor# [1:582:2502] txid# 281474976715657 SEND to# [1:545:2470] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-07-08T11:59:32.387851Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:598:2517], Recipient [1:607:2523]: NKikimr::TEvTablet::TEvBoot 2025-07-08T11:59:32.388114Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:598:2517], Recipient [1:607:2523]: NKikimr::TEvTablet::TEvRestored 2025-07-08T11:59:32.388204Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:607:2523] 2025-07-08T11:59:32.388268Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:32.399784Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:598:2517], Recipient [1:607:2523]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T11:59:32.399950Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:32.399973Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T11:59:32.400134Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T11:59:32.400140Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T11:59:32.400147Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T11:59:32.400196Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T11:59:32.400218Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T11:59:32.400230Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 
persisting started state actor id [1:623:2523] in generation 1 2025-07-08T11:59:32.413264Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T11:59:32.417728Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T11:59:32.417812Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T11:59:32.417838Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:625:2533] 2025-07-08T11:59:32.417843Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T11:59:32.417848Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T11:59:32.417853Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T11:59:32.417919Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:607:2523], Recipient [1:607:2523]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-07-08T11:59:32.417926Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-07-08T11:59:32.418027Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T11:59:32.418052Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T11:59:32.418062Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T11:59:32.418068Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T11:59:32.418075Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-07-08T11:59:32.418080Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-07-08T11:59:32.418084Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-07-08T11:59:32.418088Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T11:59:32.418093Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T11:59:32.418113Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:614:2527], Recipient [1:607:2523]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T11:59:32.418117Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T11:59:32.418124Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:603:2520], serverId# [1:614:2527], sessionId# [0:0:0] 2025-07-08T11:59:32.418229Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:361:2356], Recipient [1:614:2527] 2025-07-08T11:59:32.418236Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-07-08T11:59:32.418258Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T11:59:32.418305Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-07-08T11:59:32.418315Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T11:59:32.418331Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 
281474976715657 at tablet 72075186224037888 2025-07-08T11:59:32.418339Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-07-08T11:59:32.418343Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-07-08T11:59:32.418348Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-07-08T11:59:32.418351Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-07-08T11:59:32.418421Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-07-08T11:59:32.418428Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-07-08T11:59:32.418432Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-07-08T11:59:32.418435Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-07-08T11:59:32.418447Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-07-08T11:59:32.418450Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-07-08T11:59:32.418454Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-07-08T11:59:32.418457Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-07-08T11:59:32.418461Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-07-08T11:59:32.418710Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:626:2534], Recipient [1:607:2523]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-07-08T11:59:32.418742Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T11:59:32.429731Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T11:59:32.429761Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-07-08T11:59:32.429768Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-07-08T11:59:32.429779Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: ... 
orceCompaction for 72075186224037888 table 1001, mode Full, forced state None, forced mode Full 2025-07-08T12:00:27.421767Z node 26 :TX_DATASHARD INFO: Started background compaction# 1 of 72075186224037888 tableId# 2 localTid# 1001, requested from [26:796:2668], partsCount# 0, memtableSize# 656, memtableWaste# 3952, memtableRows# 2 2025-07-08T12:00:27.421774Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:18} Tx{26, NKikimr::NDataShard::TDataShard::TTxCompactTable} hope 1 -> done Change{16, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-07-08T12:00:27.421778Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:18} Tx{26, NKikimr::NDataShard::TDataShard::TTxCompactTable} release 4194304b of static, Memory{0 dyn 0} 2025-07-08T12:00:27.421824Z node 26 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy PrepareCompaction for 72075186224037888: task 1, edge 9223372036854775807/0, generation 0 2025-07-08T12:00:27.421830Z node 26 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:18} starting compaction 2025-07-08T12:00:27.421858Z node 26 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:19} starting Scan{1 on 1001, Compact{72075186224037888.1.18, eph 1}} 2025-07-08T12:00:27.421866Z node 26 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:19} started compaction 1 2025-07-08T12:00:27.421869Z node 26 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy PrepareCompaction for 72075186224037888 started compaction 1 generation 0 ... blocking NKikimr::TEvBlobStorage::TEvPut from TABLET_REQ_WRITE_LOG to BS_PROXY_ACTOR 2025-07-08T12:00:27.422158Z node 26 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:19} Compact 1 on TGenCompactionParams{1001: gen 0 epoch +inf, 0 parts} step 18, product {tx status + 1 parts epoch 2} done 2025-07-08T12:00:27.422190Z node 26 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy CompactionFinished for 72075186224037888: compaction 1, generation 0 2025-07-08T12:00:27.422203Z node 26 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy CheckGeneration for 72075186224037888 generation 1, state Free, final id 0, final level 0 2025-07-08T12:00:27.422206Z node 26 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy CheckGeneration for 72075186224037888 generation 3, state Free, final id 0, final level 0 2025-07-08T12:00:27.422243Z node 26 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 1, ts 1970-01-01T00:00:00.952000Z 2025-07-08T12:00:27.422252Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:20} Tx{27, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} queued, type NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs 2025-07-08T12:00:27.422259Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:20} Tx{27, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-07-08T12:00:27.422267Z node 26 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 1, front# 1 2025-07-08T12:00:27.422274Z node 26 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001 sending TEvCompactTableResult to# [26:796:2668]pathId# [OwnerId: 72057594046644480, LocalPathId: 2] 2025-07-08T12:00:27.422331Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:20} Tx{27, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} hope 1 -> done Change{17, redo 83b alter 0b annex 0, ~{ 27 } -{ }, 0 gb} 2025-07-08T12:00:27.422338Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:20} 
Tx{27, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} release 4194304b of static, Memory{0 dyn 0} ... blocking NKikimr::TEvBlobStorage::TEvPut from TABLET_REQ_WRITE_LOG to BS_PROXY_ACTOR ... blocking NKikimr::TEvBlobStorage::TEvPut from TABLET_REQ_WRITE_LOG to BS_PROXY_ACTOR ========= Starting an immediate read ========= 2025-07-08T12:00:27.438276Z node 26 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jzmyh98z2e6gvr63kekt5vaq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=26&id=MzllMDIxNTUtZWRjOTNjMzMtNmQ1NmNmY2EtZWZlOThhYzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:27.438557Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] send [26:739:2619] 2025-07-08T12:00:27.438567Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] push event to server [26:739:2619] 2025-07-08T12:00:27.438629Z node 26 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [26:822:2676], Recipient [26:625:2531]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false KeysSize: 1 2025-07-08T12:00:27.438655Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:21} Tx{28, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} queued, type NKikimr::NDataShard::TDataShard::TTxReadViaPipeline 2025-07-08T12:00:27.438663Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:21} Tx{28, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-07-08T12:00:27.438673Z node 26 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-07-08T12:00:27.438683Z node 26 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1002/281474976715660 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-07-08T12:00:27.438691Z node 26 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v1002/18446744073709551615 2025-07-08T12:00:27.438702Z node 26 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2025-07-08T12:00:27.438718Z node 26 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-07-08T12:00:27.438723Z node 26 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2025-07-08T12:00:27.438728Z node 26 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-07-08T12:00:27.438732Z node 26 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2025-07-08T12:00:27.438745Z node 26 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037888 2025-07-08T12:00:27.438749Z node 26 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-07-08T12:00:27.438756Z node 26 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-07-08T12:00:27.438760Z node 26 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2025-07-08T12:00:27.438763Z node 26 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2025-07-08T12:00:27.438778Z node 26 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { 
OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-07-08T12:00:27.438814Z node 26 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is DelayComplete 2025-07-08T12:00:27.438818Z node 26 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2025-07-08T12:00:27.438822Z node 26 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-07-08T12:00:27.438825Z node 26 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-07-08T12:00:27.438838Z node 26 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-07-08T12:00:27.438841Z node 26 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-07-08T12:00:27.438844Z node 26 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2025-07-08T12:00:27.438849Z node 26 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-07-08T12:00:27.438863Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:21} Tx{28, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> done Change{18, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-07-08T12:00:27.438872Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:21} Tx{28, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 4194304b of static, Memory{0 dyn 0} 2025-07-08T12:00:27.449164Z node 26 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:00:27.501155Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:16} commited cookie 8 for step 15 2025-07-08T12:00:27.511525Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:18} commited cookie 8 for step 17 2025-07-08T12:00:27.542241Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:19} commited cookie 8 for step 18 2025-07-08T12:00:27.572850Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:16} Tx{18, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-07-08T12:00:27.572885Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:16} Tx{18, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-07-08T12:00:27.572938Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:16} Tx{18, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{10, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-07-08T12:00:27.572962Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:16} Tx{18, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-07-08T12:00:27.573122Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:17} commited cookie 1 for step 16 2025-07-08T12:00:27.573176Z node 26 :PIPE_CLIENT DEBUG: TClient[72057594046382081] send [26:470:2420] 2025-07-08T12:00:27.573180Z node 26 :PIPE_CLIENT DEBUG: TClient[72057594046382081] push event to server [26:470:2420] 2025-07-08T12:00:27.583563Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:20} commited cookie 8 for step 19 2025-07-08T12:00:27.624425Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:21} commited 
cookie 8 for step 20 2025-07-08T12:00:27.655204Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:22} commited cookie 8 for step 21 2025-07-08T12:00:27.689083Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:17} Tx{19, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-07-08T12:00:27.689118Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:17} Tx{19, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-07-08T12:00:27.689173Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:17} Tx{19, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{11, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-07-08T12:00:27.689184Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:17} Tx{19, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-07-08T12:00:27.689347Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} commited cookie 1 for step 17 2025-07-08T12:00:27.689402Z node 26 :PIPE_CLIENT DEBUG: TClient[72057594046382081] send [26:470:2420] 2025-07-08T12:00:27.689407Z node 26 :PIPE_CLIENT DEBUG: TClient[72057594046382081] push event to server [26:470:2420] >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::Get [GOOD] Test command err: 2025-07-08T12:00:27.896749Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679741922810818:2061];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:27.896762Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0011de/r3tmp/tmpZOSmZO/pdisk_1.dat 2025-07-08T12:00:27.974871Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:28.001129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:28.001154Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:28.002497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17695 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-07-08T12:00:28.034052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:28.041154Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0011de/r3tmp/tmpuWDRGb/pdisk_1.dat 2025-07-08T12:00:28.506310Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:28.508302Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:61243 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:28.600780Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:28.600815Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:28.601247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:28.601787Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-07-08T12:00:28.605066Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T12:00:28.347527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:00:28.347550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:28.347556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:00:28.347561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:00:28.347567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:00:28.347571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:28.347585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:28.347598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:00:28.347671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:28.360929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:00:28.360962Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:28.364747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:28.364793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:28.364822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:00:28.366354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:28.366413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:28.366508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:28.366725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:00:28.367462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:28.367498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:00:28.367715Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-07-08T12:00:28.367723Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:28.367737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:28.367743Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:28.367749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:00:28.367773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.368842Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T12:00:28.386563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:28.386633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.386698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:00:28.386738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:28.386748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.387499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:28.387522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:00:28.387566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.387573Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:00:28.387579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:00:28.387584Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:00:28.387930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.387939Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:28.387943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:00:28.388200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.388206Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.388212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:28.388219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:00:28.388788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:28.389168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:00:28.389208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:00:28.389380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:28.389400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:28.389420Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:28.389490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:00:28.389500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:28.389530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:28.389540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:00:28.389869Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:28.389875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:28.389917Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:28.389921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:00:28.389930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.389936Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:00:28.389946Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 
2025-07-08T12:00:28.389951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:28.389955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:28.389958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:28.389964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:00:28.389969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:28.389974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:00:28.389978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:00:28.389986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:28.389992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:00:28.389996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:00:28.390337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:00:28.390348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... essage# TabletId: 72075186233409548 TxId: 104 Status: OK 2025-07-08T12:00:28.871452Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-07-08T12:00:28.871457Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-07-08T12:00:28.871751Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-07-08T12:00:28.871810Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-07-08T12:00:28.871816Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-07-08T12:00:28.871869Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 104, at schemeshard: 72057594046678944 2025-07-08T12:00:28.871874Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-07-08T12:00:28.871879Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 104, at schemeshard: 72057594046678944 2025-07-08T12:00:28.903551Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 150, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:28.903603Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 AckTo { RawX1: 0 RawX2: 0 } } Step: 150 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:28.903617Z node 2 :FLAT_TX_SCHEMESHARD 
INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvOperationPlan, step: 150, at tablet: 72057594046678944 2025-07-08T12:00:28.903634Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-07-08T12:00:28.908896Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409548, partId: 0 2025-07-08T12:00:28.908969Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2025-07-08T12:00:28.908981Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2025-07-08T12:00:28.908993Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.908998Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-07-08T12:00:28.909045Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-07-08T12:00:28.909080Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-07-08T12:00:28.909091Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-07-08T12:00:28.910889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.910973Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:28.910980Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-07-08T12:00:28.911031Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T12:00:28.911069Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:28.911075Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-07-08T12:00:28.911080Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:206:2208], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-07-08T12:00:28.911172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.911181Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-07-08T12:00:28.911194Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-07-08T12:00:28.911199Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-07-08T12:00:28.911204Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part 
operation is done id#104:0 progress is 1/1 2025-07-08T12:00:28.911210Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-07-08T12:00:28.911214Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-07-08T12:00:28.911220Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-07-08T12:00:28.911225Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-07-08T12:00:28.911229Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-07-08T12:00:28.911252Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-07-08T12:00:28.911257Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 1 2025-07-08T12:00:28.911261Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-07-08T12:00:28.911264Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-07-08T12:00:28.911453Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-07-08T12:00:28.911464Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-07-08T12:00:28.911468Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-07-08T12:00:28.911473Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-07-08T12:00:28.911477Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-07-08T12:00:28.911596Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-07-08T12:00:28.911605Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-07-08T12:00:28.911608Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-07-08T12:00:28.911612Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-07-08T12:00:28.911615Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-07-08T12:00:28.911624Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2025-07-08T12:00:28.911628Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:409:2377] 
2025-07-08T12:00:28.912559Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-07-08T12:00:28.912589Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-07-08T12:00:28.912603Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-07-08T12:00:28.912608Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [2:544:2481] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Merge { Partition: 0 AdjacentPartition: 2 } TestModificationResults wait txId: 105 2025-07-08T12:00:28.914745Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Merge { Partition: 0 AdjacentPartition: 2 } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:28.914792Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.914830Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: You cannot merge non-contiguous partitions, at schemeshard: 72057594046678944 2025-07-08T12:00:28.915217Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "You cannot merge non-contiguous partitions" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:28.915250Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: You cannot merge non-contiguous partitions, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-07-08T12:00:28.915298Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-07-08T12:00:28.915304Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-07-08T12:00:28.915370Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-07-08T12:00:28.915384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-07-08T12:00:28.915388Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:638:2564] TestWaitNotification: OK eventTxId 105 >> TSchemeShardTopicSplitMergeTest::MargeInactivePartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> Describe::Basic >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderService [GOOD] Test command err: 2025-07-08T12:00:27.575730Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679741572627027:2234];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:27.581077Z node 1 
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0011e8/r3tmp/tmpzD1qwS/pdisk_1.dat 2025-07-08T12:00:27.688734Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:27.721556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:27.721582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:27.722424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15479 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:27.753298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:00:27.760562Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:00:27.811215Z node 1 :GRPC_CLIENT DEBUG: [733bf94edf0] Connect to grpc://localhost:17449 2025-07-08T12:00:27.813084Z node 1 :GRPC_CLIENT DEBUG: [733bf94edf0] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-07-08T12:00:27.825404Z node 1 :GRPC_CLIENT DEBUG: [733bf94edf0] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:17449: Failed to connect to remote host: Connection refused 2025-07-08T12:00:27.825939Z node 1 :GRPC_CLIENT DEBUG: [733bf94edf0] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-07-08T12:00:27.826095Z node 1 :GRPC_CLIENT DEBUG: [733bf94edf0] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:17449: Failed to connect to remote host: Connection refused 2025-07-08T12:00:28.573255Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:00:28.829229Z node 1 :GRPC_CLIENT DEBUG: [733bf94edf0] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-07-08T12:00:28.834559Z node 1 :GRPC_CLIENT DEBUG: [733bf94edf0] Status 5 Not Found 2025-07-08T12:00:28.834757Z node 1 :GRPC_CLIENT DEBUG: [733bf94edf0] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2025-07-08T12:00:28.839013Z node 1 :GRPC_CLIENT DEBUG: [733bf94edf0] Response ResolveFoldersResponse { resolved_folders { cloud_id: "response_cloud_id" } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T12:00:28.396917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:00:28.396942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:28.396968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:00:28.396973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:00:28.396978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:00:28.396983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:28.396999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:28.397012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2025-07-08T12:00:28.397084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:28.417419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:00:28.417441Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:28.427109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:28.427165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:28.427191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:00:28.428546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:28.428604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:28.428708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:28.428838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:00:28.429566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:28.429601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:00:28.429801Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:28.429809Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:28.429824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:28.429831Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:28.429836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:00:28.429859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.431000Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T12:00:28.447334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:28.447400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.447461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:00:28.447500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:28.447509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.448394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, 
response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:28.448433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:00:28.448489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.448501Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:00:28.448506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:00:28.448515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:00:28.449060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.449075Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:28.449081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:00:28.449449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.449460Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.449467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:28.449473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:00:28.450079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:28.450462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:00:28.450505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:00:28.450680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:28.450705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:28.450726Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:28.450813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:00:28.450820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at 
tablet# 72057594046678944 2025-07-08T12:00:28.450851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:28.450863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:00:28.451367Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:28.451376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:28.451426Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:28.451431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:00:28.451441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.451450Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:00:28.451463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:28.451467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:28.451471Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:28.451474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:28.451478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:00:28.451483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:28.451487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:00:28.451491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:00:28.451502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:28.451508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:00:28.451512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:00:28.451895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:00:28.451910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
:747:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:748:2058] recipient: [1:746:2661] Leader for TabletID 72057594046678944 is [1:749:2662] sender: [1:750:2058] recipient: [1:746:2661] 2025-07-08T12:00:28.671610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:00:28.671630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:28.671636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:00:28.671640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:00:28.671645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:00:28.671652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:28.671661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:28.671673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:00:28.671718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:28.672902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:28.673243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:28.673284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:28.673321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:00:28.673327Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:28.673350Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:28.673417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2025-07-08T12:00:28.673435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-07-08T12:00:28.673441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_1, child name: Topic1, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-07-08T12:00:28.673449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.673457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.673488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 
2025-07-08T12:00:28.673539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.673549Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-07-08T12:00:28.673569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.673586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.673600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2025-07-08T12:00:28.673606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-07-08T12:00:28.673609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-07-08T12:00:28.673612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-07-08T12:00:28.673615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-07-08T12:00:28.673626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.673634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.673669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2025-07-08T12:00:28.673703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T12:00:28.673744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.673756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.673816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.673824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.673865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.673874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.673883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.673903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.673911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.673942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.673974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.673981Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.673987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.674000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.674006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.674012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.674852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:00:28.675333Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:28.675347Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:28.675508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:28.675519Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:28.675526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:00:28.675833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:749:2662] sender: [1:808:2058] recipient: [1:15:2062] 2025-07-08T12:00:28.737321Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-07-08T12:00:28.737409Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 108us result status StatusSuccess 2025-07-08T12:00:28.737573Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\177" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } 
YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\177" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\177" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceTransitional [GOOD] Test command err: 2025-07-08T12:00:27.648995Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679741553295624:2062];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:27.649016Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001249/r3tmp/tmp7PSOxF/pdisk_1.dat 2025-07-08T12:00:27.714969Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:23881 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:27.749892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:00:27.757262Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-07-08T12:00:27.789013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:27.789038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:27.789881Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:27.806641Z node 1 :GRPC_CLIENT DEBUG: [45faff94f7f0] Connect to grpc://localhost:24831 2025-07-08T12:00:27.808429Z node 1 :GRPC_CLIENT DEBUG: [45faff94f7f0] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-07-08T12:00:27.813260Z node 1 :GRPC_CLIENT DEBUG: [45faff94f7f0] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:24831: Failed to connect to remote host: Connection refused 2025-07-08T12:00:27.814914Z node 1 :GRPC_CLIENT DEBUG: [45faff94f7f0] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-07-08T12:00:27.817006Z node 1 :GRPC_CLIENT DEBUG: [45faff94f7f0] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:24831: Failed to connect to remote host: Connection refused 2025-07-08T12:00:28.651518Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:00:28.825060Z node 1 :GRPC_CLIENT DEBUG: [45faff94f7f0] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-07-08T12:00:28.829812Z node 1 :GRPC_CLIENT DEBUG: [45faff94f7f0] Status 5 Not Found 2025-07-08T12:00:28.830020Z node 1 :GRPC_CLIENT DEBUG: [45faff94f7f0] Request ListFoldersRequest { id: "i_am_exists" } 2025-07-08T12:00:28.830686Z node 1 :GRPC_CLIENT DEBUG: [45faff94f7f0] Response ListFoldersResponse { result { cloud_id: "response_cloud_id" } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] Test command err: 2025-07-08T12:00:26.121015Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0011ba/r3tmp/tmp1Hc9yn/pdisk_1.dat 2025-07-08T12:00:26.269999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:00:26.286862Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:26.319342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:26.319378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:26.330128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:26.412252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:26.434882Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:603:2519] 2025-07-08T12:00:26.434980Z node 1 :TX_DATASHARD DEBUG: 
TxInitSchema.Execute 2025-07-08T12:00:26.450786Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:26.450821Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:26.450982Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T12:00:26.450993Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T12:00:26.450999Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T12:00:26.451061Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:26.451078Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:26.451090Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:619:2519] in generation 1 2025-07-08T12:00:26.461350Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:26.465691Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T12:00:26.465751Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:26.465772Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:621:2529] 2025-07-08T12:00:26.465778Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:26.465783Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T12:00:26.465788Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:26.465933Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T12:00:26.465954Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T12:00:26.465969Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:26.465975Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:26.465983Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:26.465988Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:26.466088Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:599:2516], serverId# [1:610:2523], sessionId# [0:0:0] 2025-07-08T12:00:26.466126Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:26.466174Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T12:00:26.466193Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T12:00:26.466484Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:26.479328Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:26.479369Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T12:00:26.628628Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, 
clientId# [1:640:2542], serverId# [1:642:2544], sessionId# [0:0:0] 2025-07-08T12:00:26.629587Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-07-08T12:00:26.629615Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:26.629770Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:26.629780Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-07-08T12:00:26.629789Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-07-08T12:00:26.629854Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-07-08T12:00:26.629889Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-07-08T12:00:26.629993Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:26.630008Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-07-08T12:00:26.630394Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-07-08T12:00:26.630501Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:26.630764Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-07-08T12:00:26.630773Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:26.630939Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-07-08T12:00:26.630949Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:26.631287Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:26.631301Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:26.631306Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-07-08T12:00:26.631322Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:361:2356], exec latency: 0 ms, propose latency: 0 ms 2025-07-08T12:00:26.631333Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-07-08T12:00:26.631343Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:26.632221Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:26.632467Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-07-08T12:00:26.632478Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 
72075186224037888 2025-07-08T12:00:26.632623Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-07-08T12:00:26.695826Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715658. Ctx: { TraceId: 01jzmyh8g9aht1rhefdgaqz6xm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzcxOGIxYjMtZWY3OWRiZDAtZDE3NmRiZjQtN2M2OGRhNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:26.723173Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:685:2577], serverId# [1:686:2578], sessionId# [0:0:0] 2025-07-08T12:00:26.723264Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:26.744588Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:26.744651Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:26.745713Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:693:2584], serverId# [1:694:2585], sessionId# [0:0:0] 2025-07-08T12:00:26.745986Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-07-08T12:00:26.745998Z node 1 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-07-08T12:00:26.746025Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:693:2584], serverId# [1:694:2585], sessionId# [0:0:0] 2025-07-08T12:00:26.746037Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:26.746044Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:26.746052Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:26.746061Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:26.746252Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:26.746325Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:26.746372Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:26.746376Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:26.746383Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for WaitForStreamClearance 2025-07-08T12:00:26.746415Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:26.746421Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:26.746541Z node 1 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 2025-07-08T12:00:26.746583Z node 1 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715659, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-07-08T12:00:26.746604Z node 1 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715659, PendingAcks: 0 2025-07-08T12:00:26.746609Z nod ... 
24037893 2025-07-08T12:00:29.032136Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [2:1174:2973], serverId# [2:1175:2974], sessionId# [0:0:0] 2025-07-08T12:00:29.032175Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1174:2973], serverId# [2:1175:2974], sessionId# [0:0:0] 2025-07-08T12:00:29.032334Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [2:1179:2978], serverId# [2:1180:2979], sessionId# [0:0:0] 2025-07-08T12:00:29.032358Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1179:2978], serverId# [2:1180:2979], sessionId# [0:0:0] 2025-07-08T12:00:29.032508Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [2:1184:2983], serverId# [2:1185:2984], sessionId# [0:0:0] 2025-07-08T12:00:29.032530Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1184:2983], serverId# [2:1185:2984], sessionId# [0:0:0] 2025-07-08T12:00:29.035543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:00:29.036477Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037893 2025-07-08T12:00:29.036513Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-07-08T12:00:29.036550Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-07-08T12:00:29.036661Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037891 2025-07-08T12:00:29.036683Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2025-07-08T12:00:29.036698Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:29.068929Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [2:1207:3003] 2025-07-08T12:00:29.069024Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:29.070261Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:29.070286Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:29.070403Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037894 2025-07-08T12:00:29.070414Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037894 2025-07-08T12:00:29.070420Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037894 2025-07-08T12:00:29.071245Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:29.071284Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:29.071296Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037894 persisting started state actor id [2:1223:3003] in generation 1 2025-07-08T12:00:29.092353Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:29.092379Z node 2 :TX_DATASHARD INFO: 
Switched to work state WaitScheme tabletId 72075186224037894 2025-07-08T12:00:29.092406Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:29.092420Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037894, actorId: [2:1225:3013] 2025-07-08T12:00:29.092425Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037894 2025-07-08T12:00:29.092429Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037894, state: WaitScheme 2025-07-08T12:00:29.092434Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-07-08T12:00:29.092550Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037894 2025-07-08T12:00:29.092570Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037894 2025-07-08T12:00:29.092693Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037894 2025-07-08T12:00:29.092701Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:29.092709Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037894 TxInFly 0 2025-07-08T12:00:29.092715Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037894 2025-07-08T12:00:29.092730Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1206:3002], serverId# [2:1214:3007], sessionId# [0:0:0] 2025-07-08T12:00:29.092765Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-07-08T12:00:29.092813Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037894 txId 281474976715663 ssId 72057594046644480 seqNo 2:7 2025-07-08T12:00:29.092829Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715663 at tablet 72075186224037894 2025-07-08T12:00:29.092916Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037894 2025-07-08T12:00:29.103344Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-07-08T12:00:29.103386Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 not sending time cast registration request in state WaitScheme 2025-07-08T12:00:29.230836Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1235:3023], serverId# [2:1237:3025], sessionId# [0:0:0] 2025-07-08T12:00:29.230973Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715663 at step 4000 at tablet 72075186224037894 { Transactions { TxId: 281474976715663 AckTo { RawX1: 0 RawX2: 0 } } Step: 4000 MediatorID: 72057594046382081 TabletID: 72075186224037894 } 2025-07-08T12:00:29.230982Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-07-08T12:00:29.231138Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037894 2025-07-08T12:00:29.231146Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 active 0 active planned 0 immediate 0 planned 1 2025-07-08T12:00:29.231154Z node 2 :TX_DATASHARD DEBUG: Found ready operation [4000:281474976715663] in PlanQueue unit at 72075186224037894 2025-07-08T12:00:29.231217Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037894 loaded tx from db 4000:281474976715663 keys 
extracted: 0 2025-07-08T12:00:29.231243Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-07-08T12:00:29.231299Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037894 2025-07-08T12:00:29.231309Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037894 tableId# [OwnerId: 72057594046644480, LocalPathId: 8] schema version# 1 2025-07-08T12:00:29.231387Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037894 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-07-08T12:00:29.231454Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:29.231731Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037894 time 3500 2025-07-08T12:00:29.231738Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-07-08T12:00:29.231832Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037894 step# 4000} 2025-07-08T12:00:29.231841Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037894 2025-07-08T12:00:29.232008Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037894 2025-07-08T12:00:29.232015Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037894 2025-07-08T12:00:29.232020Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037894 2025-07-08T12:00:29.232035Z node 2 :TX_DATASHARD DEBUG: Complete [4000 : 281474976715663] from 72075186224037894 at tablet 72075186224037894 send result to client [2:362:2357], exec latency: 0 ms, propose latency: 0 ms 2025-07-08T12:00:29.232042Z node 2 :TX_DATASHARD INFO: 72075186224037894 Sending notify to schemeshard 72057594046644480 txId 281474976715663 state Ready TxInFly 0 2025-07-08T12:00:29.232052Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-07-08T12:00:29.232292Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037893 2025-07-08T12:00:29.232308Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-07-08T12:00:29.232320Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037894 2025-07-08T12:00:29.232337Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-07-08T12:00:29.232367Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037891 2025-07-08T12:00:29.232382Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2025-07-08T12:00:29.232394Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:29.232502Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037894 coordinator 72057594046316545 last step 0 next step 4000 
2025-07-08T12:00:29.232572Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715663 datashard 72075186224037894 state Ready 2025-07-08T12:00:29.232577Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2025-07-08T12:00:29.233358Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1264:3046], serverId# [2:1265:3047], sessionId# [0:0:0] 2025-07-08T12:00:29.233399Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1264:3046], serverId# [2:1265:3047], sessionId# [0:0:0] 2025-07-08T12:00:29.233564Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1269:3051], serverId# [2:1270:3052], sessionId# [0:0:0] 2025-07-08T12:00:29.233589Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1269:3051], serverId# [2:1270:3052], sessionId# [0:0:0] 2025-07-08T12:00:29.233742Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1274:3056], serverId# [2:1275:3057], sessionId# [0:0:0] 2025-07-08T12:00:29.233767Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1274:3056], serverId# [2:1275:3057], sessionId# [0:0:0] |67.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> KqpScanArrowInChanels::JoinWithParams [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition |67.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T12:00:27.268747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:00:27.268776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:27.268782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:00:27.268787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:00:27.268793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:00:27.268797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:27.268826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:27.268840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:00:27.268923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:27.283455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:00:27.283479Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:27.288623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:27.288697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:27.288730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:00:27.290777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:27.290854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:27.290960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:27.291144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:00:27.292146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:27.292193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:00:27.292450Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:27.292463Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:27.292481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:27.292489Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:27.292495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:00:27.292526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:00:27.293958Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T12:00:27.315833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:27.315909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:27.315970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:00:27.316012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:27.316024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at 
schemeshard: 72057594046678944 2025-07-08T12:00:27.316854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:27.316884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:00:27.316932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:27.316943Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:00:27.316971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:00:27.316977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:00:27.320258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:27.320278Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:27.320284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:00:27.320796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:27.320810Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:27.320816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:27.320823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:00:27.321472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:27.321921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:00:27.321963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:00:27.322151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:27.322175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:27.322196Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:27.322273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 
2025-07-08T12:00:27.322280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:27.322310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:27.322321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:00:27.322749Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:27.322757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:27.322801Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:27.322807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:00:27.322819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:27.322825Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:00:27.322836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:27.322840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:27.322845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:27.322848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:27.322852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:00:27.322857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:27.322862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:00:27.322866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:00:27.322877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:27.322882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:00:27.322887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:00:27.323268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:00:27.323284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
parts: 1/1 2025-07-08T12:00:28.753510Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-07-08T12:00:28.753517Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-07-08T12:00:28.753521Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-07-08T12:00:28.753525Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-07-08T12:00:28.753530Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-07-08T12:00:28.753533Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-07-08T12:00:28.753560Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-07-08T12:00:28.753565Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 0 2025-07-08T12:00:28.753569Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-07-08T12:00:28.753661Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-07-08T12:00:28.753674Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-07-08T12:00:28.753678Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-07-08T12:00:28.753683Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-07-08T12:00:28.753687Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-07-08T12:00:28.753698Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-07-08T12:00:28.754826Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-07-08T12:00:28.757498Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-07-08T12:00:28.757508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-07-08T12:00:28.757560Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-07-08T12:00:28.757572Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-07-08T12:00:28.757576Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:750:2663] TestWaitNotification: OK eventTxId 105 2025-07-08T12:00:29.451337Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:29.451419Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe 
pathId 3 took 95us result status StatusSuccess 2025-07-08T12:00:29.451540Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\010" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\010" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\010" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\010" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:29.547429Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-07-08T12:00:29.547524Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 115us result status StatusSuccess 2025-07-08T12:00:29.547689Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 
72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\010" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\010" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\010" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\010" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } PartitionStrategy { PartitionStrategyType: DISABLED } } TestModificationResults wait txId: 106 2025-07-08T12:00:29.548392Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } PartitionStrategy { PartitionStrategyType: DISABLED } } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:29.548437Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2025-07-08T12:00:29.548461Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Can`t disable auto partitioning., at schemeshard: 72057594046678944 
2025-07-08T12:00:29.557424Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Can`t disable auto partitioning." TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:29.557481Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Can`t disable auto partitioning., operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-07-08T12:00:29.557586Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-07-08T12:00:29.557594Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-07-08T12:00:29.557682Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-07-08T12:00:29.557705Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-07-08T12:00:29.557711Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:765:2677] TestWaitNotification: OK eventTxId 106 >> TServiceAccountServiceTest::IssueToken [GOOD] |67.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks [GOOD] Test command err: 2025-07-08T12:00:26.176924Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0011c4/r3tmp/tmpflsVX5/pdisk_1.dat 2025-07-08T12:00:26.298104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:00:26.315629Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:26.349748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:26.349787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:26.361419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:26.446662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:26.470047Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:625:2535] 2025-07-08T12:00:26.470122Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:26.488107Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:26.488152Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:26.488312Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T12:00:26.488321Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T12:00:26.488327Z node 1 
:TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T12:00:26.488387Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:26.488427Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:26.488437Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:652:2535] in generation 1 2025-07-08T12:00:26.488861Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:627:2537] 2025-07-08T12:00:26.488901Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:26.494925Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:26.495014Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:631:2539] 2025-07-08T12:00:26.495045Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:26.496017Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:26.496145Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-07-08T12:00:26.496154Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-07-08T12:00:26.496160Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-07-08T12:00:26.496194Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:26.496254Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:26.496261Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:675:2537] in generation 1 2025-07-08T12:00:26.496309Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:26.496318Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:26.496379Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-07-08T12:00:26.496383Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-07-08T12:00:26.496386Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-07-08T12:00:26.496400Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:26.496410Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:26.496415Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:676:2539] in generation 1 2025-07-08T12:00:26.509224Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:26.517115Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T12:00:26.517189Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:26.517214Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:680:2566] 2025-07-08T12:00:26.517219Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:26.517223Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T12:00:26.517227Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:26.517263Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:26.517271Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 
2025-07-08T12:00:26.517282Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:26.517288Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:681:2567] 2025-07-08T12:00:26.517292Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-07-08T12:00:26.517295Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-07-08T12:00:26.517297Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-07-08T12:00:26.517392Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:26.517397Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-07-08T12:00:26.517407Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:26.517416Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:682:2568] 2025-07-08T12:00:26.517419Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-07-08T12:00:26.517423Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-07-08T12:00:26.517426Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-07-08T12:00:26.517535Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T12:00:26.517558Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T12:00:26.517564Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-07-08T12:00:26.517571Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-07-08T12:00:26.517587Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:26.517594Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:26.517601Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:26.517606Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:26.517612Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-07-08T12:00:26.517615Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:26.517619Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-07-08T12:00:26.517623Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-07-08T12:00:26.517629Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-07-08T12:00:26.517647Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-07-08T12:00:26.517664Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:614:2530], serverId# [1:640:2543], sessionId# [0:0:0] 2025-07-08T12:00:26.517670Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:615:2531], serverId# [1:648:2549], sessionId# [0:0:0] 
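The shards above all boot into WaitScheme and explicitly skip time-cast registration and change-sender activation ("not sending time cast registration request in state WaitScheme: missing processing params", "Cannot activate change sender"); both happen only after the scheme transaction completes, as the "Send registration request to time cast" and "Change sender activated" entries further below show. A toy model of that gating, under the assumption that the names below are hypothetical and only the state names come from the log:

```cpp
#include <cassert>

// Simplified gating model: while a shard is still in WaitScheme it neither
// registers with the mediator time cast nor activates its change sender;
// both follow completion of the scheme transaction, when it becomes Ready.
enum class EShardState { WaitScheme, Ready };

struct TShardModel {
    EShardState State = EShardState::WaitScheme;
    bool RegisteredWithTimeCast = false;
    bool ChangeSenderActive = false;

    void TryActivate() {
        if (State != EShardState::Ready) {
            return; // "Cannot activate change sender ... state: WaitScheme"
        }
        RegisteredWithTimeCast = true; // "Send registration request to time cast"
        ChangeSenderActive = true;     // "Change sender activated"
    }

    void CompleteSchemeTx() {
        State = EShardState::Ready;
        TryActivate();
    }
};

int main() {
    TShardModel shard;
    shard.TryActivate();
    assert(!shard.ChangeSenderActive); // still WaitScheme
    shard.CompleteSchemeTx();
    assert(shard.RegisteredWithTimeCast && shard.ChangeSenderActive);
    return 0;
}
```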
2025-07-08T12:00:26.517675Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-07-08T12:00:26.517679Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:26.517682Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-07-08T12:00:26.517686Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-07-08T12:00:26.517731Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:26.517779Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T12:00:26.517801Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T12:00:26.517914Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-07-08T12:00:26.517941Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-07-08T12:00:26.517954Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-07-08T12:00:26.518430Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:26.518451Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-07-08T12:00:26.529201Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:26.529248Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T12:00:26.529423Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-07-08T12:00:26.529435Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-07-08T12:00:26.577136Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:616:2532], serverId# [1:699:2578], sessionId# [0:0:0] 2025-07-08T12:00:26.577204Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-07-08T12:00:26.577256Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 281474976715657 ssId 72057594046644480 seqNo 2:3 2025-07-08T12:00:26.577286Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037890 2025-07-08T12:00:26.577393Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-07-08T12:00:26.589192Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2025-07-08T12:00:26.589230Z ... 
node 3 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:29.399448Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:29.399616Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T12:00:29.399627Z node 3 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T12:00:29.399634Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T12:00:29.399683Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:29.399707Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:29.399719Z node 3 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [3:619:2519] in generation 1 2025-07-08T12:00:29.410307Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:29.410336Z node 3 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T12:00:29.410367Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:29.410380Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [3:621:2529] 2025-07-08T12:00:29.410385Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:29.410389Z node 3 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T12:00:29.410394Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:29.410511Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T12:00:29.410533Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T12:00:29.410550Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:29.410556Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:29.410564Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:29.410569Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:29.410661Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:599:2516], serverId# [3:610:2523], sessionId# [0:0:0] 2025-07-08T12:00:29.410698Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:29.410752Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T12:00:29.410770Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T12:00:29.411061Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:29.421521Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:29.421562Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T12:00:29.579749Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:640:2542], serverId# [3:642:2544], 
sessionId# [0:0:0] 2025-07-08T12:00:29.579852Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-07-08T12:00:29.579861Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:29.580091Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:29.580101Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-07-08T12:00:29.580110Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-07-08T12:00:29.580164Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-07-08T12:00:29.580195Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-07-08T12:00:29.580291Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:29.580305Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-07-08T12:00:29.580387Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-07-08T12:00:29.580460Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:29.580787Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-07-08T12:00:29.580795Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:29.580916Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-07-08T12:00:29.580926Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:29.581143Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:29.581154Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:29.581158Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-07-08T12:00:29.581173Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:361:2356], exec latency: 0 ms, propose latency: 0 ms 2025-07-08T12:00:29.581182Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-07-08T12:00:29.581191Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:29.581392Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:29.581671Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-07-08T12:00:29.581700Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 
72075186224037888 state Ready 2025-07-08T12:00:29.581706Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-07-08T12:00:29.598891Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715658. Ctx: { TraceId: 01jzmyhbceazck2mmcskpcdp49, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MzQ0MTYyYWItNTkyZTkyNmEtNjljNzU4ZDgtYTJhNmNiMTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:29.599051Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:685:2577], serverId# [3:686:2578], sessionId# [0:0:0] 2025-07-08T12:00:29.599115Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:29.631695Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:29.631747Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:29.679227Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715659. Ctx: { TraceId: 01jzmyhbe26vyj8ab97bfg4gah, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDY4NDczNzEtNDhkM2YzNWYtZWFkZmY1MjMtZDRmOWU5ZDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:29.679732Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715659, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] { items { uint64_value: 0 } } 2025-07-08T12:00:29.681181Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:719:2603], serverId# [3:720:2604], sessionId# [0:0:0] 2025-07-08T12:00:29.681440Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-07-08T12:00:29.693200Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-07-08T12:00:29.693226Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:29.693238Z node 3 :TX_DATASHARD DEBUG: Waiting for PlanStep# 1001 from mediator time cast 2025-07-08T12:00:29.693429Z node 3 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 1001 at tablet 72075186224037888 2025-07-08T12:00:29.693438Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:29.693487Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-07-08T12:00:29.693495Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037888 2025-07-08T12:00:29.693571Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:29.693579Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:29.693587Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:29.693596Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:29.693610Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:719:2603], serverId# [3:720:2604], sessionId# [0:0:0] 2025-07-08T12:00:29.708381Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. 
Ctx: { TraceId: 01jzmyhbfxa9keme0hyz66y4wj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDY4NDczNzEtNDhkM2YzNWYtZWFkZmY1MjMtZDRmOWU5ZDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:29.708560Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:29.719124Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:29.719176Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:29.720759Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NDY4NDczNzEtNDhkM2YzNWYtZWFkZmY1MjMtZDRmOWU5ZDE=, ActorId: [3:692:2583], ActorState: ExecuteState, TraceId: 01jzmyhbfxa9keme0hyz66y4wj, Create QueryResponse for error on request, msg: 2025-07-08T12:00:29.721100Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jzmyhbfxa9keme0hyz66y4wj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDY4NDczNzEtNDhkM2YzNWYtZWFkZmY1MjMtZDRmOWU5ZDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:29.721228Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:29.721354Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:29.721365Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::JoinWithParams [GOOD] Test command err: Trying to start YDB, gRPC: 14502, MsgBus: 9866 2025-07-08T12:00:26.761086Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679738091983172:2072];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:26.761114Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00147c/r3tmp/tmpoOQ9T6/pdisk_1.dat 2025-07-08T12:00:26.858465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:26.858494Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:26.867661Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:26.868081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14502, node 1 2025-07-08T12:00:26.897215Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:26.897225Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:26.897227Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:26.897263Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9866 TClient is connected to server localhost:9866 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:26.997691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:27.009357Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-07-08T12:00:27.021734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:27.055339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:00:27.084431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-07-08T12:00:27.145601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:00:27.271751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T12:00:27.280617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-07-08T12:00:27.295292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-07-08T12:00:27.306196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-07-08T12:00:27.364286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-07-08T12:00:27.378365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-07-08T12:00:27.391110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-07-08T12:00:27.554494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-07-08T12:00:27.674599Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976027711, txId: 281474976710674] shutting down 864000000000 2025-07-08T12:00:27.753387Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Trying to start YDB, gRPC: 2384, MsgBus: 13383 2025-07-08T12:00:28.001414Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679744614583774:2059];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:28.001433Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00147c/r3tmp/tmpE9PjfR/pdisk_1.dat 2025-07-08T12:00:28.014908Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2384, node 2 2025-07-08T12:00:28.032743Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:28.032759Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:28.032761Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:28.032797Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13383 TClient is connected to server localhost:13383 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:28.108241Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:28.108263Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:28.108557Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:28.108908Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:00:28.110652Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:00:28.117627Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:28.136463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:28.155978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:28.169321Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:00:28.501990Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:00:28.511284Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:00:28.521056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:00:28.537945Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:00:28.546753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:00:28.560927Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:00:28.577794Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:00:28.758283Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976028803, txId: 281474976715670] shutting down Trying to start YDB, gRPC: 10257, MsgBus: 19468 2025-07-08T12:00:29.055567Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7524679748135189978:2244];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00147c/r3tmp/tmpsZReir/pdisk_1.dat 2025-07-08T12:00:29.061109Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:00:29.071057Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10257, node 3 2025-07-08T12:00:29.081623Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:29.081634Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:29.081637Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:29.081671Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19468 TClient is connected to server localhost:19468 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:29.161210Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:29.161244Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:29.161673Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:29.162150Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:00:29.163971Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:00:29.178121Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:29.197113Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:29.222206Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:29.236627Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:00:29.404585Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:00:29.414918Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:00:29.429719Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:00:29.446991Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:00:29.458522Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:00:29.469825Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:00:29.485079Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:00:29.715180Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976029755, txId: 281474976715670] shutting down 2025-07-08T12:00:29.747110Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976029790, txId: 281474976715672] shutting down |67.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition [GOOD] |67.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> DataShardWrite::ExecSQLUpsertImmediate+EvWrite |67.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> DataShardWrite::WriteImmediateBadRequest >> DataShardWrite::IncrementImmediate >> DataShardWrite::UpsertImmediate >> Viewer::JsonStorageListingV2NodeIdFilter [GOOD] >> Viewer::JsonStorageListingV2PDiskIdFilter ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T12:00:29.799481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:00:29.799499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2025-07-08T12:00:29.799503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:00:29.799506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:00:29.799510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:00:29.799512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:29.799521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:29.799531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:00:29.799582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:29.809146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:00:29.809162Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:29.812268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:29.812300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:29.812322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:00:29.813450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:29.813510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:29.813605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:29.813755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:00:29.814310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:29.814340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:00:29.814518Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:29.814523Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:29.814534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:29.814539Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:29.814544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:00:29.814561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:00:29.815332Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T12:00:29.829444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" 
StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:29.829507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:29.829568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:00:29.829605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:29.829614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:29.833214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:29.833238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:00:29.833277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:29.833285Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:00:29.833288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:00:29.833292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:00:29.833685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:29.833699Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:29.833704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:00:29.834053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:29.834064Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:29.834071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:29.834078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:00:29.834613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:29.835003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:00:29.835043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: 
advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:00:29.835225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:29.835249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:29.835273Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:29.835345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:00:29.835352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:29.835381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:29.835393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:00:29.835787Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:29.835795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:29.835839Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:29.835844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:00:29.835853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:29.835859Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:00:29.835871Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:29.835875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:29.835881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:29.835884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:29.835888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:00:29.835893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:29.835898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:00:29.835902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:00:29.835913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:29.835919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 
2025-07-08T12:00:29.835923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:00:29.836288Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:00:29.836305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 2057594046678944 2025-07-08T12:00:30.519141Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose ProgressState, operationId: 105:0, at schemeshard: 72057594046678944 2025-07-08T12:00:30.519154Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1 2025-07-08T12:00:30.519184Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72075186233409546 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409548 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 151 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409546 2025-07-08T12:00:30.519515Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:0 from tablet: 72057594046678944 to tablet: 72075186233409548 cookie: 72057594046678944:3 msg type: 269550082 2025-07-08T12:00:30.519547Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 0:105 msg type: 269090816 2025-07-08T12:00:30.519564Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72075186233409546 2025-07-08T12:00:30.519620Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2025-07-08T12:00:30.519643Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: TabletId: 72075186233409548 TxId: 105 Status: OK 2025-07-08T12:00:30.519650Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409548 TxId: 105 Status: OK 2025-07-08T12:00:30.519654Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-07-08T12:00:30.519658Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-07-08T12:00:30.521275Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-07-08T12:00:30.521339Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-07-08T12:00:30.521345Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-07-08T12:00:30.521417Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 105, at schemeshard: 72057594046678944 2025-07-08T12:00:30.521422Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2025-07-08T12:00:30.521430Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 105, at schemeshard: 72057594046678944 2025-07-08T12:00:30.553271Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 200, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:30.553318Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 AckTo { RawX1: 0 RawX2: 0 } } Step: 200 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:30.553331Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 105:0 HandleReply TEvOperationPlan, step: 200, at tablet: 72057594046678944 2025-07-08T12:00:30.553339Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-07-08T12:00:30.563385Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2025-07-08T12:00:30.563446Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2025-07-08T12:00:30.563457Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2025-07-08T12:00:30.563470Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:30.563476Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-07-08T12:00:30.563525Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 240 2025-07-08T12:00:30.563563Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-07-08T12:00:30.564437Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-07-08T12:00:30.564525Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:30.564532Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T12:00:30.564596Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:30.564603Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2206], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-07-08T12:00:30.564752Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-07-08T12:00:30.564762Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-07-08T12:00:30.564774Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 
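For operation 105 the trace shows the part reaching its final state (128 -> 240) once the PQ tablet reports COMPLETE, while the test waiter is satisfied only after the scheme-board publication of the path is acknowledged ("Publication complete, notify & remove", then "satisfy waiter"). A minimal sketch of that ordering; the structure and names are assumptions, only the event order is taken from the trace:

```cpp
#include <cassert>

// Toy completion model: the waiter is notified only when both the shard has
// reported COMPLETE and the path publication has been acknowledged.
struct TOperationModel {
    bool PartDone = false;          // state 128 -> 240 on COMPLETE
    bool PublicationAcked = false;  // TEvUpdateAck for the path version
    bool WaiterNotified = false;

    void OnTabletComplete() { PartDone = true; MaybeNotify(); }
    void OnPublicationAck() { PublicationAcked = true; MaybeNotify(); }

    void MaybeNotify() {
        if (PartDone && PublicationAcked) {
            WaiterNotified = true; // TEvNotifyTxCompletionResult
        }
    }
};

int main() {
    TOperationModel op;
    op.OnTabletComplete();
    assert(!op.WaiterNotified);   // "Publication still in progress"
    op.OnPublicationAck();
    assert(op.WaiterNotified);    // "TestWaitNotification: OK eventTxId 105"
    return 0;
}
```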
2025-07-08T12:00:30.564778Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-07-08T12:00:30.564783Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-07-08T12:00:30.564786Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-07-08T12:00:30.564790Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-07-08T12:00:30.564795Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-07-08T12:00:30.564800Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-07-08T12:00:30.564808Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-07-08T12:00:30.564840Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-07-08T12:00:30.564846Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2025-07-08T12:00:30.564849Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-07-08T12:00:30.564978Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-07-08T12:00:30.564992Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-07-08T12:00:30.564996Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-07-08T12:00:30.565001Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-07-08T12:00:30.565006Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-07-08T12:00:30.565019Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2025-07-08T12:00:30.565023Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:405:2371] 2025-07-08T12:00:30.567039Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-07-08T12:00:30.567077Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-07-08T12:00:30.567083Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:662:2585] TestWaitNotification: OK eventTxId 105 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } TestModificationResults wait txId: 106 2025-07-08T12:00:30.570845Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } } } TxId: 106 TabletId: 72057594046678944 
, at schemeshard: 72057594046678944 2025-07-08T12:00:30.570899Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2025-07-08T12:00:30.570940Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Invalid partition status: 2, at schemeshard: 72057594046678944 2025-07-08T12:00:30.571475Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Invalid partition status: 2" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:30.571508Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Invalid partition status: 2, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-07-08T12:00:30.571567Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-07-08T12:00:30.571576Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-07-08T12:00:30.571650Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-07-08T12:00:30.571669Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-07-08T12:00:30.571674Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:755:2666] TestWaitNotification: OK eventTxId 106 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::IssueToken [GOOD] Test command err: 2025-07-08T12:00:29.619907Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679748096943791:2227];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:29.619949Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001196/r3tmp/tmp3rbuzs/pdisk_1.dat 2025-07-08T12:00:29.678428Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:8817 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-07-08T12:00:29.732944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:29.735362Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-07-08T12:00:29.751732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:29.751754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:29.752678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001196/r3tmp/tmprHRDWo/pdisk_1.dat 2025-07-08T12:00:30.093154Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:30.096262Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:8187 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:30.184122Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:30.184166Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:30.184535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:00:30.185485Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:00:30.185647Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected >> DataShardWrite::UpsertPrepared+Volatile |67.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |67.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |67.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> Viewer::JsonStorageListingV1NodeIdFilter [GOOD] >> Viewer::JsonStorageListingV1PDiskIdFilter |67.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |67.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |67.3%| [TA] $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge [GOOD] |67.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TUserAttrsTestWithReboots::AllowedSymbolsReboots |67.3%| [TA] $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardWrite::UpsertImmediate [GOOD] >> DataShardWrite::UpsertImmediateManyColumns >> DataShardWrite::WriteImmediateBadRequest [GOOD] >> DataShardWrite::WriteImmediateSeveralOperations >> DataShardWrite::ExecSQLUpsertImmediate+EvWrite [GOOD] >> DataShardWrite::ExecSQLUpsertImmediate-EvWrite |67.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T12:00:28.152908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:00:28.152932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:28.152937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:00:28.152941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:00:28.152963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:00:28.152967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:28.152976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:28.152987Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:00:28.153051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:28.164935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:00:28.164965Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:28.170106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:28.170150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:28.170176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:00:28.171344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:28.171392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:28.171473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:28.171586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:00:28.172192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:28.172221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:00:28.172413Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:28.172420Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:28.172434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:28.172441Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:28.172446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:00:28.172466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.173480Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T12:00:28.191371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:28.191430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.191482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:00:28.191519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:28.191528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.196090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:28.196126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:00:28.196178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.196190Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:00:28.196195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:00:28.196200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:00:28.197243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.197261Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:28.197267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:00:28.213302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.213328Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.213337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:28.213346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:00:28.213971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:28.218804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:00:28.218859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:00:28.219082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:28.219115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:28.219135Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 
2025-07-08T12:00:28.219221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:00:28.219230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:28.219264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:28.219278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:00:28.219904Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:28.219914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:28.219954Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:28.219959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:00:28.219969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:28.219976Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:00:28.219986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:28.219990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:28.219995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:28.219998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:28.220002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:00:28.220007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:28.220011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:00:28.220015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:00:28.220026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:28.220035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:00:28.220038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:00:28.220438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:00:28.220453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
AT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-07-08T12:00:30.643262Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 240 2025-07-08T12:00:30.643301Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-07-08T12:00:30.644015Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-07-08T12:00:30.644094Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:30.644101Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T12:00:30.644170Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:30.644176Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:204:2206], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-07-08T12:00:30.644310Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-07-08T12:00:30.644318Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-07-08T12:00:30.644330Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-07-08T12:00:30.644334Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-07-08T12:00:30.644339Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-07-08T12:00:30.644342Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-07-08T12:00:30.644346Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-07-08T12:00:30.644351Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-07-08T12:00:30.644357Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-07-08T12:00:30.644361Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-07-08T12:00:30.644388Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-07-08T12:00:30.644393Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 0 2025-07-08T12:00:30.644397Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-07-08T12:00:30.644491Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-07-08T12:00:30.644501Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-07-08T12:00:30.644505Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-07-08T12:00:30.644510Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, 
txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-07-08T12:00:30.644514Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-07-08T12:00:30.644526Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-07-08T12:00:30.646733Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-07-08T12:00:30.650086Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-07-08T12:00:30.650099Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-07-08T12:00:30.650189Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-07-08T12:00:30.650213Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-07-08T12:00:30.650219Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:752:2663] TestWaitNotification: OK eventTxId 105 2025-07-08T12:00:31.229427Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:31.229534Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 3 took 122us result status StatusSuccess 2025-07-08T12:00:31.229719Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" 
ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:31.305249Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-07-08T12:00:31.305366Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 143us result status StatusSuccess 2025-07-08T12:00:31.305544Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: 
"\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >>>>> Verify partition 0 >>>>> Verify partition 1 >>>>> Verify partition 2 |67.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |67.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> DataShardWrite::IncrementImmediate [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile |67.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> DataShardWrite::UpsertPrepared+Volatile [GOOD] >> DataShardWrite::UpsertPrepared-Volatile >> DistributedEraseTests::ConditionalEraseRowsCheckLimits [GOOD] >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex |67.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |67.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |67.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> LocalPartition::DirectWriteWithoutDescribeResourcesPermission [GOOD] |67.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> DataShardWrite::WriteImmediateSeveralOperations [GOOD] >> DataShardWrite::UpsertPreparedManyTables-Volatile |67.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |67.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |67.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |67.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |67.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> DataShardWrite::UpsertImmediateManyColumns [GOOD] >> DataShardWrite::ReplaceImmediate |67.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> DataShardWrite::ExecSQLUpsertImmediate-EvWrite [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile |67.4%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |67.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> DataShardWrite::UpsertPrepared-Volatile [GOOD] >> DataShardWrite::UpsertPreparedManyTables+Volatile |67.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |67.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile |67.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/topic/ut/unittest >> LocalPartition::DirectWriteWithoutDescribeResourcesPermission [GOOD] Test command err: 2025-07-08T11:58:14.579282Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679168546539975:2063];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:14.579564Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T11:58:14.614096Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b48/r3tmp/tmpG2f3ki/pdisk_1.dat 2025-07-08T11:58:14.651193Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11579, node 1 2025-07-08T11:58:14.661435Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/43nv/001b48/r3tmp/yandexX2RzNJ.tmp 2025-07-08T11:58:14.661444Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/43nv/001b48/r3tmp/yandexX2RzNJ.tmp 2025-07-08T11:58:14.661492Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/43nv/001b48/r3tmp/yandexX2RzNJ.tmp 2025-07-08T11:58:14.661529Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T11:58:14.664819Z INFO: TTestServer started on Port 4823 GrpcPort 11579 TClient is connected to server localhost:4823 PQClient connected to localhost:11579 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-07-08T11:58:14.717874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:14.720529Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T11:58:14.725648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:14.725680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:14.726740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T11:58:14.778402Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-07-08T11:58:14.779584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T11:58:14.818191Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:15.126136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:58:15.127647Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7524679172841507997:2293], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T11:58:15.128085Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDQyZGE4YjAtZDRiY2Y3MDQtZDVhM2YxMDgtOTRkZDcyN2I=, ActorId: [1:7524679172841507994:2291], ActorState: ExecuteState, TraceId: 01jzmyd81ada1yvdvrxg9b3xwn, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T11:58:15.128493Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T11:58:15.136765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:58:15.161703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7524679172841508273:2564] 2025-07-08T11:58:15.582840Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:58:19.581129Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7524679168546539975:2063];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:19.581158Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-07-08T11:58:20.405431Z :Statistics INFO: TTopicSdkTestSetup started 2025-07-08T11:58:20.429341Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-07-08T11:58:20.441606Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7524679194316345064:2743] connected; active server actors: 1 2025-07-08T11:58:20.441672Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2025-07-08T11:58:20.441742Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-07-08T11:58:20.442102Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-07-08T11:58:20.442139Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-07-08T11:58:20.442145Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-07-08T11:58:20.442153Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2025-07-08T11:58:20.442182Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-07-08T11:58:20.442203Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2025-07-08T11:58:20.442601Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2025-07-08T11:58:20.442606Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2025-07-08T11:58:20.442621Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7524679194316345106:2399], now have 1 active actors on pipe 2025-07-08T11:58:20.485185Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7524679194316345063:2742], now have 1 active actors on pipe 2025-07-08T11:58:20.485313Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-07-08T11:58:20.485795Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-07-08T11:58:20.486503Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7524679194316345119:2773], now have 1 active actors on pipe 2025-07-08T11:58:20.486557Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-07-08T11:58:20.486933Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-07-08T11:58:20.487132Z node 1 :PERSQUEUE DEBUG: [test-topic:0:Initializer] Start initializing step TInitConfigStep 2025-07-08T11:58:20.487146Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateInit] HandleOnInit TEvPQ::TEvProposePartitionConfig 2025-07-08T11:58:20.487204Z node 1 :PERSQUEUE DEBUG: [test-topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-07-08T11:58:20.487320Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] bootstrapping 0 [1:7524679194316345125:2403] 2025-07-08T11:58:20.487564Z node 1 :PERSQUEUE DEBUG: [test-topic:0:Initializer] Initializing completed. 2025-07-08T11:58:20.487567Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] init complete for topic 'test-topic' partition 0 generation 1 [1:7524679194316345125:2403] 2025-07-08T11:58:20.487574Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateInit] SYNC INIT topic test-topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-07-08T11:58:20.487676Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Process pending events. 
Count 1 2025-07-08T11:58:20.487703Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 user test-consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-07-08T11:58:20.487797Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-07-08T11:58:20.488293Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-07-08T11:58:20.490891Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1751975900535, TxId 281474976715671 2025-07-08T11:58:20.490961Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-07-08T11:58:20.501122Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportiveP ... me, $Partition, $SeqNo); 2025-07-08T12:00:32.161699Z node 14 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-07-08T12:00:32.161705Z node 14 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [14:7524679764296648068:2423] (SourceId=test-message_group_id, PreferedPartition=0) ReplyResult: Partition=0, SeqNo=0 2025-07-08T12:00:32.161708Z node 14 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 3 sessionId: partition: 0 expectedGeneration: 1 2025-07-08T12:00:32.161818Z node 14 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 14, Generation: 1 2025-07-08T12:00:32.161826Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [14:7524679764296648071:2423], now have 1 active actors on pipe 2025-07-08T12:00:32.161832Z node 14 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2025-07-08T12:00:32.161836Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2025-07-08T12:00:32.161856Z node 14 :PERSQUEUE INFO: new Cookie test-message_group_id|ccec1825-c6fc4b14-49a1a41-e57e5e9e_0 generated for partition 0 topic 'test-topic' owner test-message_group_id 2025-07-08T12:00:32.161875Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-07-08T12:00:32.161891Z node 14 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-07-08T12:00:32.161915Z node 14 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2025-07-08T12:00:32.161917Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2025-07-08T12:00:32.161928Z node 14 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-07-08T12:00:32.161940Z node 14 :PQ_WRITE_PROXY INFO: session inited cookie: 3 partition: 0 MaxSeqNo: 0 sessionId: test-message_group_id|ccec1825-c6fc4b14-49a1a41-e57e5e9e_0 2025-07-08T12:00:32.162157Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|93f42350-3b1e837e-9bb2396b-4dd5466c_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2025-07-08T12:00:32.162176Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|93f42350-3b1e837e-9bb2396b-4dd5466c_0] PartitionId [0] Generation [1] Write session established. Init response: session_id: "test-message_group_id|ccec1825-c6fc4b14-49a1a41-e57e5e9e_0" 2025-07-08T12:00:32.162183Z :TRACE: [/Root] TRACE_EVENT InitResponse partition_id=0 session_id=test-message_group_id|ccec1825-c6fc4b14-49a1a41-e57e5e9e_0 2025-07-08T12:00:32.162191Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|ccec1825-c6fc4b14-49a1a41-e57e5e9e_0] PartitionId [0] Generation [1] Write session: set DirectWriteToPartitionId 0 2025-07-08T12:00:32.162271Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|ccec1825-c6fc4b14-49a1a41-e57e5e9e_0] PartitionId [0] Generation [1] Write 1 messages with Id from 1 to 1 2025-07-08T12:00:32.162294Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|ccec1825-c6fc4b14-49a1a41-e57e5e9e_0] PartitionId [0] Generation [1] Write session: close. 
Timeout 18446744073709.551615s 2025-07-08T12:00:32.162440Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|ccec1825-c6fc4b14-49a1a41-e57e5e9e_0] PartitionId [0] Generation [1] Write session: try to update token 2025-07-08T12:00:32.162453Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|ccec1825-c6fc4b14-49a1a41-e57e5e9e_0] PartitionId [0] Generation [1] Send 1 message(s) (0 left), first sequence number is 1 2025-07-08T12:00:32.169095Z node 14 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: test-message_group_id|ccec1825-c6fc4b14-49a1a41-e57e5e9e_0 grpc read done: success: 1 data: write_request[data omitted] 2025-07-08T12:00:32.169253Z node 14 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-07-08T12:00:32.169404Z node 14 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2025-07-08T12:00:32.169418Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2025-07-08T12:00:32.169453Z node 14 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 0 requestId: cookie: 1 2025-07-08T12:00:32.169470Z node 14 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-07-08T12:00:32.169563Z node 14 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'test-topic' requestId: 2025-07-08T12:00:32.169570Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'test-topic' partition 0 2025-07-08T12:00:32.169582Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message topic: test-topic partition: 0 SourceId: '\0test-message_group_id' SeqNo: 1 partNo : 0 messageNo: 1 size 98 offset: -1 2025-07-08T12:00:32.169628Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 part blob processing sourceId '\0test-message_group_id' seqNo 1 partNo 0 2025-07-08T12:00:32.169945Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 part blob complete sourceId '\0test-message_group_id' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 187 count 1 nextOffset 1 batches 1 2025-07-08T12:00:32.169998Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'test-topic' partition 0 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000000_00000000000000000000_00000_0000000001_00000| size 175 WTime 1751976032169 2025-07-08T12:00:32.170022Z node 14 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-07-08T12:00:32.170030Z node 14 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 0 partNo 0 count 1 size 175 2025-07-08T12:00:32.170438Z node 14 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 0 count 1 size 175 actorID [14:7524679764296647949:2402] 2025-07-08T12:00:32.170472Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 120 WriteNewSizeFromSupportivePartitions# 0 2025-07-08T12:00:32.170478Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-07-08T12:00:32.170486Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message_group_id', Topic: 'test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-07-08T12:00:32.170509Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 user test-consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-07-08T12:00:32.170518Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 user test-consumer send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-07-08T12:00:32.170533Z node 14 :PERSQUEUE DEBUG: Answer ok topic: 'test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-07-08T12:00:32.170558Z node 14 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-07-08T12:00:32.170787Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|ccec1825-c6fc4b14-49a1a41-e57e5e9e_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 0 2025-07-08T12:00:32.170843Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|ccec1825-c6fc4b14-49a1a41-e57e5e9e_0] PartitionId [0] Generation [1] Write session got write response: acks { seq_no: 1 written { } } write_statistics { persisting_time { } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2025-07-08T12:00:32.170850Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|ccec1825-c6fc4b14-49a1a41-e57e5e9e_0] PartitionId [0] Generation [1] OnAck: seqNo=1, txId=? 2025-07-08T12:00:32.170854Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|ccec1825-c6fc4b14-49a1a41-e57e5e9e_0] PartitionId [0] Generation [1] Write session: acknoledged message 1 2025-07-08T12:00:32.170682Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'test-topic' partition 0 user test-consumer offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2025-07-08T12:00:32.170692Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1 2025-07-08T12:00:32.170697Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2025-07-08T12:00:32.170701Z node 14 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-07-08T12:00:32.170715Z node 14 :PERSQUEUE DEBUG: Topic 'test-topic' partition 0 user test-consumer readTimeStamp done, result 1751976032169 queuesize 0 startOffset 0 2025-07-08T12:00:32.170979Z node 14 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. 
Tablet '72075186224037892' partition 0 offset 0 partno 0 count 1 parts 0 size 175 2025-07-08T12:00:32.262598Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|ccec1825-c6fc4b14-49a1a41-e57e5e9e_0] PartitionId [0] Generation [1] Write session will now close 2025-07-08T12:00:32.262626Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|ccec1825-c6fc4b14-49a1a41-e57e5e9e_0] PartitionId [0] Generation [1] Write session: aborting 2025-07-08T12:00:32.262842Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|ccec1825-c6fc4b14-49a1a41-e57e5e9e_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-07-08T12:00:32.262898Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|ccec1825-c6fc4b14-49a1a41-e57e5e9e_0] PartitionId [0] Generation [1] Write session: destroy 2025-07-08T12:00:32.267243Z node 14 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: test-message_group_id|ccec1825-c6fc4b14-49a1a41-e57e5e9e_0 grpc read done: success: 0 data: 2025-07-08T12:00:32.267256Z node 14 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id|ccec1825-c6fc4b14-49a1a41-e57e5e9e_0 grpc read failed 2025-07-08T12:00:32.267265Z node 14 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id|ccec1825-c6fc4b14-49a1a41-e57e5e9e_0 grpc closed 2025-07-08T12:00:32.267269Z node 14 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: test-message_group_id|ccec1825-c6fc4b14-49a1a41-e57e5e9e_0 is DEAD 2025-07-08T12:00:32.267538Z node 14 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-07-08T12:00:32.268756Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [14:7524679764296648071:2423] destroyed 2025-07-08T12:00:32.268773Z node 14 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
|67.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |67.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> DataShardWrite::ReplaceImmediate [GOOD] >> DataShardWrite::ReplaceImmediate_DefaultValue |67.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |67.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx5 [GOOD] |67.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> DataShardWrite::UpsertPreparedManyTables-Volatile [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache+Volatile >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile [GOOD] >> DataShardWrite::DeleteImmediate >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex [GOOD] |67.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |67.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |67.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |67.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |67.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |67.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> DataShardWrite::UpsertPreparedManyTables+Volatile [GOOD] >> DataShardWrite::UpsertNoLocksArbiter |67.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx5 [GOOD] Test command err: iteration# 5 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 11 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 17 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 23 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 29 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 35 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 41 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 47 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 53 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 59 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 65 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 71 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 77 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 83 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 89 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 95 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 101 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 107 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 113 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 119 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 125 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 131 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 137 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 143 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 149 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 155 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 161 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 167 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 173 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 179 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 185 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 191 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 
blobsUnwritten# 1218 iteration# 197 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 203 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 209 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 215 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 221 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 227 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 233 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 239 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 245 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 251 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 257 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 263 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 269 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 275 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 281 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 287 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 293 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 299 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 305 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 311 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 317 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 323 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 329 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 335 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 341 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 347 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 353 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 359 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 365 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 371 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 377 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 383 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 389 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 395 BlobsWritten# 2041 blobsWrittenFull# 157 
blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 401 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 407 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 413 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 419 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 425 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 431 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 437 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 443 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 449 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 455 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 461 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 467 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 473 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 479 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 485 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 491 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 497 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 503 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 509 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 515 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 521 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 527 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 533 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 539 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 545 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 551 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 557 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 563 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 569 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 575 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 581 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 587 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 593 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 599 BlobsWritten# 2041 
blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 605 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 611 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 617 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 623 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 629 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 635 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 641 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 647 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 653 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 659 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 665 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 671 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 677 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 683 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 689 BlobsWritten# 2041 blobsWrittenF ... blobsUnwritten# 1218 iteration# 1367 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1373 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1379 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1385 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1391 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1397 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1403 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1409 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1415 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1421 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1427 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1433 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1439 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1445 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1451 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1457 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1463 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1469 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1475 BlobsWritten# 
2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1481 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1487 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1493 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1499 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1505 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1511 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1517 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1523 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1529 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1535 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1541 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1547 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1553 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1559 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1565 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1571 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1577 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1583 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1589 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1595 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1601 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1607 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1613 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1619 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1625 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1631 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1637 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1643 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1649 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1655 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1661 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1667 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1673 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 
blobsUnwritten# 1218 iteration# 1679 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1685 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1691 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1697 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1703 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1709 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1715 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1721 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1727 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1733 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1739 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1745 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1751 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1757 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1763 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1769 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1775 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1781 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1787 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1793 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1799 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1805 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1811 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1817 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1823 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1829 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1835 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1841 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1847 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1853 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1859 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1865 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1871 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1877 BlobsWritten# 2041 
blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1883 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1889 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1895 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1901 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1907 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1913 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1919 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1925 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1931 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1937 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1943 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1949 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1955 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1961 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1967 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1973 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1979 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1985 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1991 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1997 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2003 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2009 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2015 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2021 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2027 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2033 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2039 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 |67.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |67.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex [GOOD] Test command err: 2025-07-08T12:00:27.139124Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: 
LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0011b6/r3tmp/tmpmrx93d/pdisk_1.dat 2025-07-08T12:00:27.273415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:00:27.293286Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:27.326728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:27.326772Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:27.337400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:27.412296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:27.432631Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:625:2535] 2025-07-08T12:00:27.432700Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:27.440804Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:27.440845Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:27.441293Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T12:00:27.441312Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T12:00:27.441318Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T12:00:27.441380Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:27.441468Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:27.441479Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:652:2535] in generation 1 2025-07-08T12:00:27.441853Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:627:2537] 2025-07-08T12:00:27.441894Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:27.443396Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:27.443459Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:631:2539] 2025-07-08T12:00:27.443481Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:27.444330Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:27.444438Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-07-08T12:00:27.444446Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-07-08T12:00:27.444526Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-07-08T12:00:27.444563Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:27.444645Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:27.444654Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:675:2537] in generation 1 2025-07-08T12:00:27.444715Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:27.444727Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:27.444817Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 
72075186224037890 2025-07-08T12:00:27.444823Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-07-08T12:00:27.444827Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-07-08T12:00:27.444848Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:27.444862Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:27.444869Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:676:2539] in generation 1 2025-07-08T12:00:27.455143Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:27.459188Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T12:00:27.459251Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:27.459271Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:680:2566] 2025-07-08T12:00:27.459276Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:27.459281Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T12:00:27.459286Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:27.459317Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:27.459324Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-07-08T12:00:27.459333Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:27.459339Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:681:2567] 2025-07-08T12:00:27.459342Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-07-08T12:00:27.459344Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-07-08T12:00:27.459347Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-07-08T12:00:27.459427Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:27.459433Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-07-08T12:00:27.459442Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:27.459449Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:682:2568] 2025-07-08T12:00:27.459452Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-07-08T12:00:27.459454Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-07-08T12:00:27.459457Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-07-08T12:00:27.459562Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T12:00:27.459585Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T12:00:27.459590Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 
2025-07-08T12:00:27.459598Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-07-08T12:00:27.459611Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:27.459617Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:27.459625Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:27.459630Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:27.459634Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-07-08T12:00:27.459637Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:27.459640Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-07-08T12:00:27.459644Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-07-08T12:00:27.459648Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-07-08T12:00:27.459655Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-07-08T12:00:27.459671Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:614:2530], serverId# [1:640:2543], sessionId# [0:0:0] 2025-07-08T12:00:27.459677Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:615:2531], serverId# [1:648:2549], sessionId# [0:0:0] 2025-07-08T12:00:27.459680Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-07-08T12:00:27.459683Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:27.459689Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-07-08T12:00:27.459693Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-07-08T12:00:27.459734Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:27.459781Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T12:00:27.459804Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T12:00:27.459907Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-07-08T12:00:27.459934Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-07-08T12:00:27.459944Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-07-08T12:00:27.460383Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:27.460403Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-07-08T12:00:27.470683Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:27.470721Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 
2025-07-08T12:00:27.470878Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-07-08T12:00:27.470889Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-07-08T12:00:27.513184Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:616:2532], serverId# [1:699:2578], sessionId# [0:0:0] 2025-07-08T12:00:27.513251Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-07-08T12:00:27.513299Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 281474976715657 ssId 72057594046644480 seqNo 2:3 2025-07-08T12:00:27.513326Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037890 2025-07-08T12:00:27.513429Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-07-08T12:00:27.529219Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2025-07-08T12:00:27.529263Z ... de 3 :TX_DATASHARD DEBUG: Deleted RS at 72075186224037893 source 72075186224037893 dest 72075186224037892 consumer 72075186224037892 seqno 1 txId 281474976715664 2025-07-08T12:00:34.570470Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-07-08T12:00:34.570497Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-07-08T12:00:34.570538Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 4 Group: 0 Step: 2000 TxId: 281474976715665 PathId: [OwnerId: 72057594046644480, LocalPathId: 10] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 7] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037893 2025-07-08T12:00:34.570573Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 5 Group: 0 Step: 2000 TxId: 281474976715665 PathId: [OwnerId: 72057594046644480, LocalPathId: 10] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 7] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037893 2025-07-08T12:00:34.570588Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 6 Group: 0 Step: 2000 TxId: 281474976715665 PathId: [OwnerId: 72057594046644480, LocalPathId: 10] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 7] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037893 2025-07-08T12:00:34.570637Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:34.583299Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037892 step# 2000} 2025-07-08T12:00:34.583329Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-07-08T12:00:34.597234Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037893 step# 2000} 2025-07-08T12:00:34.597268Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-07-08T12:00:34.597280Z node 3 :TX_DATASHARD DEBUG: Send RS 2 at 72075186224037893 from 72075186224037893 to 72075186224037892 txId 281474976715665 2025-07-08T12:00:34.597290Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 
72075186224037893 2025-07-08T12:00:34.597311Z node 3 :TX_DATASHARD DEBUG: Complete [2000 : 281474976715665] from 72075186224037893 at tablet 72075186224037893 send result to client [3:1269:2959], exec latency: 0 ms, propose latency: 0 ms 2025-07-08T12:00:34.597330Z node 3 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037893, records: { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 10] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 7] SchemaVersion: 1 }, { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 10] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 7] SchemaVersion: 1 }, { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 10] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 7] SchemaVersion: 1 } 2025-07-08T12:00:34.597339Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-07-08T12:00:34.597439Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1269:2959] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715665, shard# 72075186224037893, status# 2 2025-07-08T12:00:34.597462Z node 3 :TX_DATASHARD DEBUG: Receive RS at 72075186224037892 source 72075186224037893 dest 72075186224037892 producer 72075186224037893 txId 281474976715665 2025-07-08T12:00:34.597479Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037892 got read set: {TEvReadSet step# 2000 txid# 281474976715665 TabletSource# 72075186224037893 TabletDest# 72075186224037892 SetTabletProducer# 72075186224037893 ReadSet.Size()# 19 Seqno# 2 Flags# 0} 2025-07-08T12:00:34.597501Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037892 2025-07-08T12:00:34.597600Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-07-08T12:00:34.597609Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 1 active planned 1 immediate 0 planned 1 2025-07-08T12:00:34.597617Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [2000:281474976715665] at 72075186224037892 for LoadAndWaitInRS 2025-07-08T12:00:34.597710Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:34.597739Z node 3 :TX_DATASHARD INFO: TTxRequestChangeRecords Execute: at tablet# 72075186224037893 2025-07-08T12:00:34.597825Z node 3 :TX_DATASHARD DEBUG: Send 3 change records: to# [3:1085:2853], at tablet# 72075186224037893 2025-07-08T12:00:34.597833Z node 3 :TX_DATASHARD INFO: TTxRequestChangeRecords Complete: sent# 3, forgotten# 0, left# 0, at tablet# 72075186224037893 2025-07-08T12:00:34.597938Z node 3 :TX_DATASHARD DEBUG: Handle TEvChangeExchange::TEvApplyRecords: origin# 72075186224037893, generation# 1, at tablet# 72075186224037891 2025-07-08T12:00:34.619678Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-07-08T12:00:34.619729Z node 3 :TX_DATASHARD DEBUG: Complete [2000 : 281474976715665] from 72075186224037892 at tablet 72075186224037892 send result to client [3:1269:2959], exec latency: 0 ms, propose latency: 1 ms 2025-07-08T12:00:34.619748Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037892 {TEvReadSet step# 2000 txid# 281474976715665 TabletSource# 72075186224037893 TabletDest# 72075186224037892 SetTabletConsumer# 72075186224037892 Flags# 0 Seqno# 2} 2025-07-08T12:00:34.619756Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 
2025-07-08T12:00:34.619807Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037893 source 72075186224037893 dest 72075186224037892 consumer 72075186224037892 txId 281474976715665 2025-07-08T12:00:34.619835Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1269:2959] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715665, shard# 72075186224037892, status# 2 2025-07-08T12:00:34.619844Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1269:2959] Reply: txId# 281474976715665, status# OK, error# 2025-07-08T12:00:34.619962Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037893 2025-07-08T12:00:34.619972Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037893 2025-07-08T12:00:34.619993Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [3:1264:2955], serverId# [3:1265:2956], sessionId# [0:0:0] 2025-07-08T12:00:34.620036Z node 3 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 3, at tablet# 72075186224037893 2025-07-08T12:00:34.620042Z node 3 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 4, at tablet: 72075186224037893 2025-07-08T12:00:34.620065Z node 3 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 5, at tablet: 72075186224037893 2025-07-08T12:00:34.620071Z node 3 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 6, at tablet: 72075186224037893 2025-07-08T12:00:34.620116Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-07-08T12:00:34.620125Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:34.620134Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037893 TxInFly 0 2025-07-08T12:00:34.620390Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037892 2025-07-08T12:00:34.620484Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037892 2025-07-08T12:00:34.620531Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-07-08T12:00:34.620538Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:34.620545Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715666] at 72075186224037892 for WaitForStreamClearance 2025-07-08T12:00:34.620586Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:34.620596Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-07-08T12:00:34.620719Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037892, TxId: 281474976715666, MessageQuota: 1 2025-07-08T12:00:34.620749Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037892, TxId: 281474976715666, MessageQuota: 1 2025-07-08T12:00:34.620797Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037892 2025-07-08T12:00:34.620802Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715666, at: 72075186224037892 2025-07-08T12:00:34.620833Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-07-08T12:00:34.620838Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:34.620844Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715666] at 72075186224037892 for ReadTableScan 2025-07-08T12:00:34.620876Z node 
3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:34.620884Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-07-08T12:00:34.620891Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-07-08T12:00:34.621124Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037891 2025-07-08T12:00:34.621176Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037891 2025-07-08T12:00:34.621209Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-07-08T12:00:34.621214Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:34.621219Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715667] at 72075186224037891 for WaitForStreamClearance 2025-07-08T12:00:34.621244Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:34.621252Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-07-08T12:00:34.621341Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037891, TxId: 281474976715667, MessageQuota: 1 2025-07-08T12:00:34.621358Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037891, TxId: 281474976715667, MessageQuota: 1 2025-07-08T12:00:34.621380Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037891 2025-07-08T12:00:34.621384Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715667, at: 72075186224037891 2025-07-08T12:00:34.621413Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-07-08T12:00:34.621418Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 1 active planned 0 immediate 1 planned 0 2025-07-08T12:00:34.621422Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715667] at 72075186224037891 for ReadTableScan 2025-07-08T12:00:34.621439Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:34.621446Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-07-08T12:00:34.621452Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 >> BSCReadOnlyPDisk::ReadOnlyOneByOne [GOOD] |67.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |67.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> DataShardWrite::ReplaceImmediate_DefaultValue [GOOD] >> DataShardWrite::UpdateImmediate >> Describe::Basic [GOOD] >> Describe::DescribePartitionPermissions >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile >> TOlapReboots::CreateDropStandaloneTable |67.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |67.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlyOneByOne [GOOD] Test command err: RandomSeed# 15595458311017928134 >> 
DataShardWrite::UpsertPreparedNoTxCache+Volatile [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache-Volatile >> TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit [GOOD] >> TSchemeShardTest::ManyDirs [GOOD] >> TSchemeShardTest::ListNotCreatedDirCase >> DataShardWrite::DeleteImmediate [GOOD] >> DataShardWrite::CancelImmediate >> TOlapReboots::DropMultipleTables |67.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit >> TxUsage::Sinks_Oltp_WriteToTopics_4 [GOOD] >> TSchemeShardTest::ListNotCreatedDirCase [GOOD] >> TSchemeShardTest::ListNotCreatedIndexCase |67.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> DataShardWrite::UpsertNoLocksArbiter [GOOD] >> DataShardWrite::UpsertLostPrepareArbiter >> TxUsage::The_Configuration_Is_Changing_As_We_Write_To_The_Topic >> TOlapReboots::CreateStandaloneTable >> TOlapReboots::CreateStore >> DataShardSnapshots::MvccSnapshotAndSplit >> DataShardSnapshots::LockedWriteBulkUpsertConflict+UseSink >> DataShardSnapshots::MvccSnapshotTailCleanup >> DataShardSnapshots::LockedWriteReuseAfterCommit+UseSink >> TSchemeShardTest::ListNotCreatedIndexCase [GOOD] >> TSchemeShardTest::FindSubDomainPathId [GOOD] >> TSchemeShardTest::FindSubDomainPathIdActor >> TRtmrTest::CreateWithoutTimeCastBuckets >> DataShardWrite::UpsertPreparedNoTxCache-Volatile [GOOD] >> DataShardWrite::WriteUniqueRowsInsertDuplicateBeforeCommit >> DataShardSnapshots::UncommittedChangesRenameTable+UseSink |67.5%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |67.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest >> DataShardWrite::UpdateImmediate [GOOD] >> DataShardWrite::RejectOnChangeQueueOverflow >> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD] >> DataShardWrite::CancelImmediate [GOOD] >> DataShardWrite::DeletePrepared+Volatile >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile [GOOD] >> DataShardWrite::InsertImmediate >> TSchemeShardTest::FindSubDomainPathIdActor [GOOD] >> TSchemeShardTest::FindSubDomainPathIdActorAsync |67.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest |67.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest |67.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest |67.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TSchemeShardTest::FindSubDomainPathIdActorAsync [GOOD] >> DataShardWrite::UpsertLostPrepareArbiter [GOOD] >> DataShardWrite::UpsertNoLocksArbiterRestart ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T12:00:37.192030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 
2025-07-08T12:00:37.192054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:37.192060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:00:37.192065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:00:37.192071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:00:37.192074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:37.192086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:37.192098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:00:37.192160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:37.207307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:00:37.207330Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:37.211042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:37.211121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:37.211169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:00:37.212646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:37.212695Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:37.212796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:37.218382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:00:37.219413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:37.219463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:00:37.219702Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:37.219717Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:37.219737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:37.219745Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:37.219751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:00:37.219778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:00:37.221172Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T12:00:37.240481Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:37.240555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:37.240618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:00:37.240657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:37.240668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:37.241444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:37.241467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:00:37.241505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:37.241512Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:00:37.241516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:00:37.241519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:00:37.241937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:37.241949Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:37.241954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:00:37.242293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:37.242302Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:37.242306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:37.242312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:00:37.242682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:37.242932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:00:37.242960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:00:37.254872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:37.254922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:37.254935Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:37.255033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:00:37.255041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:37.255154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:37.255176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:00:37.255899Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:37.255907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:37.255948Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:37.255952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:00:37.255960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:37.255966Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:00:37.255975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:37.255979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:37.255982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:37.255984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:37.255988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:00:37.255991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:37.255995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:00:37.255997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:00:37.256007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] 
was 2 2025-07-08T12:00:37.256011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:00:37.256013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:00:37.256363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:00:37.256375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046678944, cookie: 100 2025-07-08T12:00:37.260971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046678944, cookie: 100 2025-07-08T12:00:37.260975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 100 2025-07-08T12:00:37.260979Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 1 2025-07-08T12:00:37.260983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-07-08T12:00:37.260995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 0/1, is published: true 2025-07-08T12:00:37.261993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-07-08T12:00:37.262006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateRTMR TConfigureParts ProgressState operationId# 100:0 at tablet72057594046678944 2025-07-08T12:00:37.262011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 3 -> 128 2025-07-08T12:00:37.262276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-07-08T12:00:37.262345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-07-08T12:00:37.262651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-07-08T12:00:37.262659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateRTMR TPropose, operationId: 100:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:37.262668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2025-07-08T12:00:37.262700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:37.263065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-07-08T12:00:37.263099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-07-08T12:00:37.263169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:37.263189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:37.263196Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateRTMR TPropose, operationId: 100:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046678944 2025-07-08T12:00:37.263213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-07-08T12:00:37.263239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:37.263247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 FAKE_COORDINATOR: Erasing txId 100 2025-07-08T12:00:37.263647Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:37.263654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:37.263688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-07-08T12:00:37.263706Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:37.263710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-07-08T12:00:37.263715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-07-08T12:00:37.263796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-07-08T12:00:37.263803Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-07-08T12:00:37.263814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-07-08T12:00:37.263822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-07-08T12:00:37.263827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-07-08T12:00:37.263830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-07-08T12:00:37.263834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-07-08T12:00:37.263839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-07-08T12:00:37.263844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-07-08T12:00:37.263848Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-07-08T12:00:37.263858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-07-08T12:00:37.263863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-07-08T12:00:37.263867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-07-08T12:00:37.263870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-07-08T12:00:37.263947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-07-08T12:00:37.263956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-07-08T12:00:37.263961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-07-08T12:00:37.263965Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-07-08T12:00:37.263969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:37.264041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-07-08T12:00:37.264050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-07-08T12:00:37.264053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-07-08T12:00:37.264057Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-07-08T12:00:37.264060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-07-08T12:00:37.264068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-07-08T12:00:37.264561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-07-08T12:00:37.264795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-07-08T12:00:37.264850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-07-08T12:00:37.264860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-07-08T12:00:37.264929Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at 
schemeshard: 72057594046678944 2025-07-08T12:00:37.264960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-07-08T12:00:37.264965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:313:2304] TestWaitNotification: OK eventTxId 100 2025-07-08T12:00:37.265027Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/rtmr1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:37.265051Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/rtmr1" took 34us result status StatusSuccess 2025-07-08T12:00:37.265118Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/rtmr1" PathDescription { Self { Name: "rtmr1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeRtmrVolume CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 RTMRVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } RtmrVolumeDescription { Name: "rtmr1" PathId: 2 PartitionsCount: 0 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |67.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |67.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> TOlapReboots::CreateMultipleTables >> DataShardSnapshots::VolatileSnapshotSplit >> DataShardWrite::RejectOnChangeQueueOverflow [GOOD] >> DataShardWrite::UpsertBrokenLockArbiter >> DataShardSnapshots::LockedWriteBulkUpsertConflict+UseSink [GOOD] >> DataShardSnapshots::LockedWriteBulkUpsertConflict-UseSink >> DataShardWrite::WriteUniqueRowsInsertDuplicateBeforeCommit [GOOD] >> DataShardWrite::WriteUniqueRowsInsertDuplicateAtCommit >> DataShardSnapshots::LockedWriteReuseAfterCommit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteReuseAfterCommit-UseSink >> DataShardWrite::InsertImmediate [GOOD] >> DataShardWrite::ImmediateAndPlannedCommittedOpsRace >> DataShardWrite::DeletePrepared+Volatile [GOOD] >> DataShardWrite::DeletePrepared-Volatile |67.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |67.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |67.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |67.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |67.6%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/tx/schemeshard/ut_rtmr/unittest |67.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... results_accumulator.log} |67.6%| [TA] {RESULT} $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardSnapshots::UncommittedChangesRenameTable+UseSink [GOOD] >> DataShardSnapshots::UncommittedChangesRenameTable-UseSink >> DataShardSnapshots::MvccSnapshotAndSplit [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWrites+UseSink |67.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |67.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |67.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |67.6%| [TA] $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... results_accumulator.log} |67.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... results_accumulator.log} |67.6%| [TA] {RESULT} $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> Viewer::JsonStorageListingV2PDiskIdFilter [GOOD] |67.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |67.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> DataShardWrite::UpsertNoLocksArbiterRestart [GOOD] >> DataShardWrite::UpsertLostPrepareArbiterRestart >> DataShardSnapshots::LockedWriteBulkUpsertConflict-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitAborted+UseSink >> DataShardWrite::UpsertBrokenLockArbiter [GOOD] >> DataShardWrite::DeletePrepared-Volatile [GOOD] >> DataShardWrite::DelayedVolatileTxAndEvWrite |67.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |67.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::FindSubDomainPathIdActorAsync [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T12:00:00.154709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:00:00.154747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:00.154753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:00:00.154757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:00:00.154770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:00:00.154774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:00.154783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:00.154795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:00:00.154855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:00.180272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:00:00.180294Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:00.202194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:00.202250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:00.202276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:00:00.234236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:00.234320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:00.234414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:00.241027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:00:00.261146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:00.261215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:00:00.261477Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:00.261486Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-07-08T12:00:00.261502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:00.261509Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:00.261515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:00:00.261543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:00:00.273446Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T12:00:00.315535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:00.315613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:00.315671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:00:00.315716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:00.315725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:00.316420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:00.316441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:00:00.316478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:00.316487Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:00:00.316492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:00:00.316497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:00:00.316811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:00.316820Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:00.316825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:00:00.317100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:00.317108Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:00.317113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:00.317120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:00:00.317722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:00.318055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:00:00.318088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:00:00.318243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:00.318265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:00.318272Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:00.318324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:00:00.318343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:00.318368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:00.318377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:00:00.318707Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:00.318714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:00.318749Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:00.318754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:00:00.318764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:00.318769Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:00:00.318779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:00.318783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:00.318787Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:00.318791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:00.318795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:00:00.318800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:00.318805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:00:00.318808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:00:00.318816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:00.318822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:00:00.318826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:00:00.319192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:00:00.319204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... t> execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 2025-07-08T12:00:37.552313Z node 15 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 102:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 2025-07-08T12:00:37.552327Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:37.552331Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 102:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-07-08T12:00:37.552377Z node 15 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-07-08T12:00:37.552415Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-07-08T12:00:37.552426Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-07-08T12:00:37.553100Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-07-08T12:00:37.553183Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:37.553192Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-07-08T12:00:37.553236Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: 
[OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T12:00:37.553270Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:37.553276Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:208:2210], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-07-08T12:00:37.553281Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:208:2210], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-07-08T12:00:37.553453Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-07-08T12:00:37.553463Z node 15 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-07-08T12:00:37.553476Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-07-08T12:00:37.553481Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-07-08T12:00:37.553485Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-07-08T12:00:37.553488Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-07-08T12:00:37.553493Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-07-08T12:00:37.553498Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-07-08T12:00:37.553503Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-07-08T12:00:37.553508Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-07-08T12:00:37.553534Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-07-08T12:00:37.553540Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-07-08T12:00:37.553544Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-07-08T12:00:37.553547Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-07-08T12:00:37.553756Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T12:00:37.553770Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T12:00:37.553774Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-07-08T12:00:37.553779Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-07-08T12:00:37.553783Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-07-08T12:00:37.553903Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 
2025-07-08T12:00:37.553912Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T12:00:37.553915Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-07-08T12:00:37.553919Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-07-08T12:00:37.553922Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-07-08T12:00:37.553930Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-07-08T12:00:37.555122Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-07-08T12:00:37.555186Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-07-08T12:00:37.557626Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-07-08T12:00:37.557637Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-07-08T12:00:37.557723Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-07-08T12:00:37.557741Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-07-08T12:00:37.557746Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [15:521:2473] TestWaitNotification: OK eventTxId 102 2025-07-08T12:00:37.557818Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomenA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:37.557866Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomenA" took 56us result status StatusSuccess 2025-07-08T12:00:37.557944Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA" PathDescription { Self { Name: "SubDomenA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ChildrenExist: false BalancerTabletID: 72075186233409547 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 
TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:37.557997Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomenA/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-07-08T12:00:37.558014Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomenA/Topic1" took 20us result status StatusSuccess 2025-07-08T12:00:37.558072Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:37.609162Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: FindTabletSubDomainPathId for tablet 72075186233409546 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::JsonStorageListingV2PDiskIdFilter [GOOD] Test command err: 2025-07-08T12:00:10.152137Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:10.152385Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:10.152596Z node 4 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:10.152787Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:10.152924Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:10.152938Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:10.158797Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:10.159908Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:10.160425Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-07-08T12:00:10.469305Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:10.647911Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-07-08T12:00:10.665115Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-07-08T12:00:10.812115Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 17502, node 1 TClient is connected to server localhost:5590 2025-07-08T12:00:10.906518Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:10.906537Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:10.906541Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:10.906641Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:00:20.452205Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:20.452690Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:20.454363Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:20.454475Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 
2025-07-08T12:00:20.454898Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:20.455638Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:20.455982Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:20.456016Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:20.456279Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-07-08T12:00:20.603232Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:20.782656Z node 10 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-07-08T12:00:20.787003Z node 10 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-07-08T12:00:20.885320Z node 10 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 14102, node 10 TClient is connected to server localhost:28502 2025-07-08T12:00:20.918388Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:20.918407Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:20.918411Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:20.918538Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:00:28.964415Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:28.965785Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:28.965991Z node 21 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:28.966037Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:28.966251Z node 26 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:28.966528Z node 27 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:28.966615Z node 24 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:28.966762Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:28.966788Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-07-08T12:00:29.069230Z node 19 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:29.214255Z node 19 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-07-08T12:00:29.220733Z node 19 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-07-08T12:00:29.316088Z node 19 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 19163, node 19 TClient is connected to server localhost:26409 2025-07-08T12:00:29.350558Z node 19 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:29.350579Z node 19 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:29.350583Z node 19 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:29.350647Z node 19 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:00:36.798205Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:36.798435Z node 32 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:36.798453Z node 33 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:36.798659Z node 35 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:36.799310Z node 31 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:36.799467Z node 34 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:36.799674Z node 36 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:36.799869Z node 29 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:36.799883Z node 30 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-07-08T12:00:36.917311Z node 28 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:37.061206Z node 28 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-07-08T12:00:37.072306Z node 28 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-07-08T12:00:37.120983Z node 28 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 12555, node 28 TClient is connected to server localhost:28638 2025-07-08T12:00:37.161030Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:37.161047Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:37.161051Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:37.161187Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration >> DataShardSnapshots::LockedWriteReuseAfterCommit-UseSink [GOOD] >> DataShardWrite::WriteUniqueRowsInsertDuplicateAtCommit [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitSuccess+UseSink >> DataShardWrite::ImmediateAndPlannedCommittedOpsRace [GOOD] >> DataShardWrite::PreparedDistributedWritePageFault >> DataShardSnapshots::VolatileSnapshotSplit [GOOD] >> DataShardSnapshots::VolatileSnapshotMerge |67.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |67.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |67.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::WriteUniqueRowsInsertDuplicateAtCommit [GOOD] Test command err: 2025-07-08T12:00:31.519978Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001dde/r3tmp/tmpluqvDQ/pdisk_1.dat 2025-07-08T12:00:31.642915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:00:31.662299Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:31.705400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:31.705431Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:31.719449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:31.798574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:31.815113Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:594:2513], Recipient [1:603:2519]: NKikimr::TEvTablet::TEvBoot 2025-07-08T12:00:31.815312Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:594:2513], Recipient [1:603:2519]: NKikimr::TEvTablet::TEvRestored 2025-07-08T12:00:31.815385Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:603:2519] 2025-07-08T12:00:31.815449Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:31.825860Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:594:2513], Recipient [1:603:2519]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T12:00:31.826004Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:31.826026Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:31.826157Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T12:00:31.826165Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T12:00:31.826170Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T12:00:31.826210Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:31.826226Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:31.826238Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:619:2519] in generation 1 2025-07-08T12:00:31.837105Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:31.840854Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T12:00:31.840908Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:31.840925Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:621:2529] 2025-07-08T12:00:31.840929Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:31.840934Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T12:00:31.840938Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:31.841039Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:603:2519], Recipient [1:603:2519]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-07-08T12:00:31.841047Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-07-08T12:00:31.841122Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T12:00:31.841139Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T12:00:31.841154Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:31.841160Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:31.841165Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-07-08T12:00:31.841169Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached 
operations 2025-07-08T12:00:31.841173Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-07-08T12:00:31.841177Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:31.841182Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:31.841256Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:610:2523], Recipient [1:603:2519]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:31.841263Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:31.841270Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:599:2516], serverId# [1:610:2523], sessionId# [0:0:0] 2025-07-08T12:00:31.841288Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:361:2356], Recipient [1:610:2523] 2025-07-08T12:00:31.841292Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-07-08T12:00:31.841308Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:31.841345Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-07-08T12:00:31.841354Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T12:00:31.841369Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T12:00:31.841385Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-07-08T12:00:31.841389Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-07-08T12:00:31.841395Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-07-08T12:00:31.841399Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-07-08T12:00:31.841438Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-07-08T12:00:31.841442Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-07-08T12:00:31.841446Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-07-08T12:00:31.841449Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-07-08T12:00:31.841460Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-07-08T12:00:31.841463Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-07-08T12:00:31.841467Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-07-08T12:00:31.841472Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-07-08T12:00:31.841477Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-07-08T12:00:31.841708Z node 1 :TX_DATASHARD TRACE: StateWork, received 
event# 269746185, Sender [1:622:2530], Recipient [1:603:2519]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-07-08T12:00:31.841717Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:31.851945Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:31.851966Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-07-08T12:00:31.851972Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-07-08T12:00:31.851981Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-07-08T12:00:31.851995Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T12:00:32.019654Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:642:2544], Recipient [1:603:2519]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:32.019679Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:32.019688Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:640:2542], serverId# [1:642:2544], sessionId# [0:0:0] 2025-07-08T12:00:32.019766Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:522:2449], Recipient [1:603:2519]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-07-08T12:00:32.019772Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-07-08T12:00:32.019797Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-07-08T12:00:32.019805Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-07-08T12:00:32.019811Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-07-08T12:00:32.019831Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-07-08T12:00:32.020558Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-07-08T12:00:32.020579Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:32.020752Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:603:2519], Recipient [1:603:2519]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-07-08T12:00:32.020762Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-07-08T12:00:32.020771Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:32.020778Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-07-08T12:00:32.020784Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-07-08T12:00:32.020792Z node 1 :TX_DATASHARD DEBUG: Found 
ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-07-08T12:00:32.020797Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit ... 8T12:00:39.560053Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 278003712, Sender [7:823:2669], Recipient [7:628:2531]: NKikimrDataEvents.TEvWrite TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715660 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: true } Op: Rollback } 2025-07-08T12:00:39.560057Z node 7 :TX_DATASHARD TRACE: Handle TTxWrite: at tablet# 72075186224037888 2025-07-08T12:00:39.560080Z node 7 :KQP_EXECUTER ERROR: ActorId: [7:913:2669] TxId: 281474976715665. Ctx: { TraceId: 01jzmyhn3zdrgyja5qpj3xhk55, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YzFjOGFmY2MtMTQ2NzZlNjItNTUzYTMzZDAtNGI1ZjA4YmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/rows`, code: 2001 } 2025-07-08T12:00:39.560112Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [7:628:2531], Recipient [7:628:2531]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-07-08T12:00:39.560120Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-07-08T12:00:39.560128Z node 7 :TX_DATASHARD TRACE: TTxWrite:: execute at tablet# 72075186224037888 2025-07-08T12:00:39.560139Z node 7 :TX_DATASHARD TRACE: Parsing write transaction for 0 at 72075186224037888, record: TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715660 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: true } Op: Rollback } 2025-07-08T12:00:39.560149Z node 7 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715660, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-07-08T12:00:39.560157Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit CheckWrite 2025-07-08T12:00:39.560163Z node 7 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2025-07-08T12:00:39.560167Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit CheckWrite 2025-07-08T12:00:39.560171Z node 7 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-07-08T12:00:39.560174Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit BuildAndWaitDependencies 2025-07-08T12:00:39.560180Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1502/281474976715664 IncompleteEdge# v{min} UnprotectedReadEdge# v1501/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v1501/18446744073709551615 2025-07-08T12:00:39.560187Z node 7 :TX_DATASHARD TRACE: Activated operation [0:10] at 72075186224037888 2025-07-08T12:00:39.560191Z node 7 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2025-07-08T12:00:39.560194Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-07-08T12:00:39.560197Z node 7 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit ExecuteWrite 2025-07-08T12:00:39.560200Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit ExecuteWrite 2025-07-08T12:00:39.560204Z node 7 :TX_DATASHARD DEBUG: Executing write operation for [0:10] at 72075186224037888 2025-07-08T12:00:39.560214Z node 7 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715660 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: true 2025-07-08T12:00:39.560218Z node 7 :TX_DATASHARD DEBUG: Skip empty write operation for [0:10] at 72075186224037888 2025-07-08T12:00:39.560247Z node 7 :TX_DATASHARD TRACE: add locks to result: 0 2025-07-08T12:00:39.560252Z node 7 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is ExecutedNoMoreRestarts 2025-07-08T12:00:39.560255Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit ExecuteWrite 2025-07-08T12:00:39.560258Z node 7 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit FinishProposeWrite 2025-07-08T12:00:39.560262Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:10] at 
72075186224037888 on unit FinishProposeWrite 2025-07-08T12:00:39.560267Z node 7 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is DelayComplete 2025-07-08T12:00:39.560270Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit FinishProposeWrite 2025-07-08T12:00:39.560273Z node 7 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit CompletedOperations 2025-07-08T12:00:39.560276Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit CompletedOperations 2025-07-08T12:00:39.560281Z node 7 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2025-07-08T12:00:39.560287Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit CompletedOperations 2025-07-08T12:00:39.560290Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:10] at 72075186224037888 has finished 2025-07-08T12:00:39.560335Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=YzFjOGFmY2MtMTQ2NzZlNjItNTUzYTMzZDAtNGI1ZjA4YmU=, ActorId: [7:809:2669], ActorState: ExecuteState, TraceId: 01jzmyhn3zdrgyja5qpj3xhk55, Create QueryResponse for error on request, msg: 2025-07-08T12:00:39.560481Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [7:62:2109], Recipient [7:628:2531]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715660 LockNode: 7 Status: STATUS_NOT_FOUND 2025-07-08T12:00:39.560504Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 278003712, Sender [7:823:2669], Recipient [7:708:2592]: NKikimrDataEvents.TEvWrite TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715660 DataShard: 72075186224037889 Generation: 1 Counter: 18446744073709551612 SchemeShard: 72057594046644480 PathId: 3 } Op: Rollback } 2025-07-08T12:00:39.560508Z node 7 :TX_DATASHARD TRACE: Handle TTxWrite: at tablet# 72075186224037889 2025-07-08T12:00:39.560603Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [7:62:2109], Recipient [7:708:2592]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715660 LockNode: 7 Status: STATUS_NOT_FOUND 2025-07-08T12:00:39.560630Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [7:708:2592], Recipient [7:708:2592]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-07-08T12:00:39.560634Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-07-08T12:00:39.560639Z node 7 :TX_DATASHARD TRACE: TTxWrite:: execute at tablet# 72075186224037889 2025-07-08T12:00:39.560651Z node 7 :TX_DATASHARD TRACE: Parsing write transaction for 0 at 72075186224037889, record: TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715660 DataShard: 72075186224037889 Generation: 1 Counter: 18446744073709551612 SchemeShard: 72057594046644480 PathId: 3 } Op: Rollback } 2025-07-08T12:00:39.560658Z node 7 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715660, Uint64 : 72075186224037889, Uint64 : 72057594046644480, Uint64 : 3) table: [1:997:0] 2025-07-08T12:00:39.560666Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit CheckWrite 2025-07-08T12:00:39.560672Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-07-08T12:00:39.560675Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit CheckWrite 2025-07-08T12:00:39.560679Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 
72075186224037889 to execution unit BuildAndWaitDependencies 2025-07-08T12:00:39.560682Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit BuildAndWaitDependencies 2025-07-08T12:00:39.560687Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037889 CompleteEdge# v1502/281474976715664 IncompleteEdge# v{min} UnprotectedReadEdge# v1501/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v1501/18446744073709551615 2025-07-08T12:00:39.560693Z node 7 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037889 2025-07-08T12:00:39.560698Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-07-08T12:00:39.560701Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-07-08T12:00:39.560704Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit ExecuteWrite 2025-07-08T12:00:39.560707Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit ExecuteWrite 2025-07-08T12:00:39.560711Z node 7 :TX_DATASHARD DEBUG: Executing write operation for [0:6] at 72075186224037889 2025-07-08T12:00:39.560721Z node 7 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715660 DataShard: 72075186224037889 Generation: 1 Counter: 18446744073709551612 SchemeShard: 72057594046644480 PathId: 3 2025-07-08T12:00:39.560725Z node 7 :TX_DATASHARD DEBUG: Skip empty write operation for [0:6] at 72075186224037889 2025-07-08T12:00:39.560730Z node 7 :TX_DATASHARD TRACE: add locks to result: 0 2025-07-08T12:00:39.560735Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is ExecutedNoMoreRestarts 2025-07-08T12:00:39.560738Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit ExecuteWrite 2025-07-08T12:00:39.560741Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit FinishProposeWrite 2025-07-08T12:00:39.560744Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit FinishProposeWrite 2025-07-08T12:00:39.560749Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is DelayComplete 2025-07-08T12:00:39.560753Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit FinishProposeWrite 2025-07-08T12:00:39.560756Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit CompletedOperations 2025-07-08T12:00:39.560759Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit CompletedOperations 2025-07-08T12:00:39.560765Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-07-08T12:00:39.560768Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit CompletedOperations 2025-07-08T12:00:39.560771Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037889 has finished 2025-07-08T12:00:39.560776Z node 7 :TX_DATASHARD TRACE: TTxWrite complete: at tablet# 72075186224037889 2025-07-08T12:00:39.560779Z node 7 :TX_DATASHARD TRACE: Complete execution for [0:6] at 72075186224037889 on unit FinishProposeWrite 2025-07-08T12:00:39.560783Z node 7 :TX_DATASHARD TRACE: Propose transaction complete txid 6 at tablet 72075186224037889 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-07-08T12:00:39.560791Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037889 2025-07-08T12:00:39.560827Z node 7 :TX_DATASHARD TRACE: TTxWrite complete: at tablet# 72075186224037888 2025-07-08T12:00:39.560832Z node 7 :TX_DATASHARD TRACE: Complete execution for [0:10] at 72075186224037888 on unit FinishProposeWrite 2025-07-08T12:00:39.560835Z node 7 :TX_DATASHARD TRACE: Propose transaction complete txid 10 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-07-08T12:00:39.560841Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> DataShardSnapshots::MvccSnapshotLockedWrites+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWrites-UseSink >> DataShardSnapshots::UncommittedChangesRenameTable-UseSink [GOOD] >> DataShardSnapshots::ShardRestartWholeShardLockBasic ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertBrokenLockArbiter [GOOD] Test command err: 2025-07-08T12:00:31.503264Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001dd9/r3tmp/tmp0bY53P/pdisk_1.dat 2025-07-08T12:00:31.633878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:00:31.650823Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:31.693416Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:31.693449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:31.705359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:31.789282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:31.805868Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:594:2513], Recipient [1:603:2519]: NKikimr::TEvTablet::TEvBoot 2025-07-08T12:00:31.806063Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:594:2513], Recipient [1:603:2519]: NKikimr::TEvTablet::TEvRestored 2025-07-08T12:00:31.806140Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:603:2519] 2025-07-08T12:00:31.806197Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:31.825694Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:594:2513], Recipient [1:603:2519]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T12:00:31.825854Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:31.825874Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:31.826020Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T12:00:31.826027Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T12:00:31.826033Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T12:00:31.826078Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:31.826091Z node 1 
:TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:31.826101Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:619:2519] in generation 1 2025-07-08T12:00:31.836353Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:31.840484Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T12:00:31.840537Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:31.840554Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:621:2529] 2025-07-08T12:00:31.840560Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:31.840564Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T12:00:31.840570Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:31.840620Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:603:2519], Recipient [1:603:2519]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-07-08T12:00:31.840627Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-07-08T12:00:31.840717Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T12:00:31.840739Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T12:00:31.840757Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:31.840764Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:31.840770Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-07-08T12:00:31.840776Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-07-08T12:00:31.840780Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-07-08T12:00:31.840786Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:31.840791Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:31.840870Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:610:2523], Recipient [1:603:2519]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:31.840878Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:31.840886Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:599:2516], serverId# [1:610:2523], sessionId# [0:0:0] 2025-07-08T12:00:31.840903Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:361:2356], Recipient [1:610:2523] 2025-07-08T12:00:31.840908Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-07-08T12:00:31.840925Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:31.841371Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-07-08T12:00:31.841385Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 
281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T12:00:31.841400Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T12:00:31.841414Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-07-08T12:00:31.841419Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-07-08T12:00:31.841424Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-07-08T12:00:31.841429Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-07-08T12:00:31.841464Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-07-08T12:00:31.841468Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-07-08T12:00:31.841472Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-07-08T12:00:31.841476Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-07-08T12:00:31.841498Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-07-08T12:00:31.841502Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-07-08T12:00:31.841506Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-07-08T12:00:31.841510Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-07-08T12:00:31.841515Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-07-08T12:00:31.841736Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:622:2530], Recipient [1:603:2519]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-07-08T12:00:31.841743Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:31.851953Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:31.851972Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-07-08T12:00:31.851978Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-07-08T12:00:31.851987Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-07-08T12:00:31.852000Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T12:00:31.995724Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:642:2544], Recipient [1:603:2519]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:31.995746Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:31.995756Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 
72075186224037888, clientId# [1:640:2542], serverId# [1:642:2544], sessionId# [0:0:0] 2025-07-08T12:00:31.995826Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:522:2449], Recipient [1:603:2519]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-07-08T12:00:31.995831Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-07-08T12:00:31.995856Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-07-08T12:00:31.995865Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-07-08T12:00:31.995870Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-07-08T12:00:31.995876Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-07-08T12:00:31.996595Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-07-08T12:00:31.996604Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:31.996702Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:603:2519], Recipient [1:603:2519]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-07-08T12:00:31.996707Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-07-08T12:00:31.996726Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:31.996733Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-07-08T12:00:31.996738Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-07-08T12:00:31.996746Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-07-08T12:00:31.996751Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit ... 
d: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-07-08T12:00:39.410603Z node 7 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[7:862:2721], 1001} after executionsCount# 1 2025-07-08T12:00:39.410608Z node 7 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[7:862:2721], 1001} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-07-08T12:00:39.410615Z node 7 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[7:862:2721], 1001} finished in read 2025-07-08T12:00:39.410620Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-07-08T12:00:39.410623Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit ExecuteRead 2025-07-08T12:00:39.410626Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit CompletedOperations 2025-07-08T12:00:39.410630Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit CompletedOperations 2025-07-08T12:00:39.410635Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-07-08T12:00:39.410638Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit CompletedOperations 2025-07-08T12:00:39.410642Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037889 has finished 2025-07-08T12:00:39.410645Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-07-08T12:00:39.410654Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-07-08T12:00:39.410726Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:867:2726], Recipient [7:646:2543]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:39.410731Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:39.410735Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [7:866:2725], serverId# [7:867:2726], sessionId# [0:0:0] 2025-07-08T12:00:39.410744Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [7:865:2724], Recipient [7:646:2543]: NKikimrTxDataShard.TEvGetInfoRequest 2025-07-08T12:00:39.411168Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:870:2729], Recipient [7:646:2543]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:39.411178Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:39.411183Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [7:869:2728], serverId# [7:870:2729], sessionId# [0:0:0] 2025-07-08T12:00:39.411211Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:868:2727], Recipient [7:646:2543]: NKikimrTxDataShard.TEvRead ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-07-08T12:00:39.411222Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2025-07-08T12:00:39.411227Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037890 CompleteEdge# v1004/1000004 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# 
v{min} ImmediateWriteEdgeReplied# v{min} 2025-07-08T12:00:39.411232Z node 7 :TX_DATASHARD TRACE: 72075186224037890 changed HEAD read to non-repeatable v1004/18446744073709551615 2025-07-08T12:00:39.411238Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit CheckRead 2025-07-08T12:00:39.411248Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2025-07-08T12:00:39.411251Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit CheckRead 2025-07-08T12:00:39.411255Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037890 to execution unit BuildAndWaitDependencies 2025-07-08T12:00:39.411258Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit BuildAndWaitDependencies 2025-07-08T12:00:39.411265Z node 7 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037890 2025-07-08T12:00:39.411269Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2025-07-08T12:00:39.411272Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit BuildAndWaitDependencies 2025-07-08T12:00:39.411276Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037890 to execution unit ExecuteRead 2025-07-08T12:00:39.411279Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit ExecuteRead 2025-07-08T12:00:39.411288Z node 7 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 1, request: { ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-07-08T12:00:39.411306Z node 7 :TX_DATASHARD TRACE: 72075186224037890 Complete read# {[7:868:2727], 1002} after executionsCount# 1 2025-07-08T12:00:39.411311Z node 7 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[7:868:2727], 1002} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-07-08T12:00:39.411318Z node 7 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[7:868:2727], 1002} finished in read 2025-07-08T12:00:39.411324Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2025-07-08T12:00:39.411327Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit ExecuteRead 2025-07-08T12:00:39.411330Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037890 to execution unit CompletedOperations 2025-07-08T12:00:39.411333Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit CompletedOperations 2025-07-08T12:00:39.411339Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2025-07-08T12:00:39.411344Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit CompletedOperations 2025-07-08T12:00:39.411347Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037890 has finished 2025-07-08T12:00:39.411351Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2025-07-08T12:00:39.411360Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2025-07-08T12:00:39.411447Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:873:2732], Recipient [7:643:2541]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:39.411452Z 
node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:39.411456Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [7:872:2731], serverId# [7:873:2732], sessionId# [0:0:0] 2025-07-08T12:00:39.411469Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [7:871:2730], Recipient [7:643:2541]: NKikimrTxDataShard.TEvGetInfoRequest 2025-07-08T12:00:39.411556Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:876:2735], Recipient [7:643:2541]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:39.411560Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:39.411564Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [7:875:2734], serverId# [7:876:2735], sessionId# [0:0:0] 2025-07-08T12:00:39.411582Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:874:2733], Recipient [7:643:2541]: NKikimrTxDataShard.TEvRead ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-07-08T12:00:39.411590Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2025-07-08T12:00:39.411595Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037891 CompleteEdge# v1004/1000004 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-07-08T12:00:39.411599Z node 7 :TX_DATASHARD TRACE: 72075186224037891 changed HEAD read to non-repeatable v1004/18446744073709551615 2025-07-08T12:00:39.411603Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit CheckRead 2025-07-08T12:00:39.411609Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2025-07-08T12:00:39.411613Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit CheckRead 2025-07-08T12:00:39.411616Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037891 to execution unit BuildAndWaitDependencies 2025-07-08T12:00:39.411619Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit BuildAndWaitDependencies 2025-07-08T12:00:39.411624Z node 7 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037891 2025-07-08T12:00:39.411628Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2025-07-08T12:00:39.411631Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit BuildAndWaitDependencies 2025-07-08T12:00:39.411635Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037891 to execution unit ExecuteRead 2025-07-08T12:00:39.411638Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit ExecuteRead 2025-07-08T12:00:39.411645Z node 7 :TX_DATASHARD TRACE: 72075186224037891 Execute read# 1, request: { ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-07-08T12:00:39.411660Z node 7 :TX_DATASHARD TRACE: 72075186224037891 Complete read# {[7:874:2733], 1003} after executionsCount# 1 2025-07-08T12:00:39.411667Z node 7 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[7:874:2733], 1003} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 
18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-07-08T12:00:39.411673Z node 7 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[7:874:2733], 1003} finished in read 2025-07-08T12:00:39.411678Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2025-07-08T12:00:39.411681Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit ExecuteRead 2025-07-08T12:00:39.411684Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037891 to execution unit CompletedOperations 2025-07-08T12:00:39.411687Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit CompletedOperations 2025-07-08T12:00:39.411692Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2025-07-08T12:00:39.411695Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit CompletedOperations 2025-07-08T12:00:39.411698Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037891 has finished 2025-07-08T12:00:39.411701Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2025-07-08T12:00:39.411710Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 |67.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> DataShardWrite::DelayedVolatileTxAndEvWrite [GOOD] |67.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> DataShardSnapshots::LockedWriteDistributedCommitAborted+UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitAborted-UseSink |67.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |67.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::DelayedVolatileTxAndEvWrite [GOOD] >> DataShardSnapshots::MvccSnapshotTailCleanup [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitSuccess+UseSink [GOOD] Test command err: 2025-07-08T12:00:31.304057Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001dfa/r3tmp/tmpgrj5to/pdisk_1.dat 2025-07-08T12:00:31.446654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:00:31.467932Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:31.501162Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:31.501213Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:31.511848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:31.588309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:31.605454Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:594:2513], Recipient [1:603:2519]: NKikimr::TEvTablet::TEvBoot 2025-07-08T12:00:31.605645Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:594:2513], Recipient [1:603:2519]: NKikimr::TEvTablet::TEvRestored 2025-07-08T12:00:31.605735Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:603:2519] 2025-07-08T12:00:31.606163Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:31.616797Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:594:2513], Recipient [1:603:2519]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T12:00:31.618190Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:31.618229Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:31.618398Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T12:00:31.618408Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T12:00:31.618416Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T12:00:31.618478Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:31.618504Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:31.618521Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:619:2519] in generation 1 2025-07-08T12:00:31.628872Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:31.632808Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T12:00:31.632892Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:31.632917Z node 1 :TX_DATASHARD DEBUG: Change sender 
created: at tablet: 72075186224037888, actorId: [1:621:2529] 2025-07-08T12:00:31.632923Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:31.632928Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T12:00:31.632935Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:31.633018Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:603:2519], Recipient [1:603:2519]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-07-08T12:00:31.633027Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-07-08T12:00:31.633132Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T12:00:31.633156Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T12:00:31.633176Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:31.633184Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:31.633191Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-07-08T12:00:31.633197Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-07-08T12:00:31.633202Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-07-08T12:00:31.633208Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:31.633214Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:31.633318Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:610:2523], Recipient [1:603:2519]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:31.633324Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:31.633335Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:599:2516], serverId# [1:610:2523], sessionId# [0:0:0] 2025-07-08T12:00:31.633356Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:361:2356], Recipient [1:610:2523] 2025-07-08T12:00:31.633360Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-07-08T12:00:31.633385Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:31.633440Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-07-08T12:00:31.633452Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T12:00:31.633473Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T12:00:31.633490Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-07-08T12:00:31.633496Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-07-08T12:00:31.633501Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-07-08T12:00:31.633506Z node 1 
:TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-07-08T12:00:31.633554Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-07-08T12:00:31.633559Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-07-08T12:00:31.633563Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-07-08T12:00:31.633566Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-07-08T12:00:31.633578Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-07-08T12:00:31.633581Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-07-08T12:00:31.633585Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-07-08T12:00:31.633589Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-07-08T12:00:31.633594Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-07-08T12:00:31.633852Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:622:2530], Recipient [1:603:2519]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-07-08T12:00:31.633861Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:31.644201Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:31.644233Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-07-08T12:00:31.644240Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-07-08T12:00:31.644252Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-07-08T12:00:31.644267Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T12:00:31.809018Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:642:2544], Recipient [1:603:2519]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:31.809044Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:31.809054Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:640:2542], serverId# [1:642:2544], sessionId# [0:0:0] 2025-07-08T12:00:31.809137Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:522:2449], Recipient [1:603:2519]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-07-08T12:00:31.809143Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-07-08T12:00:31.809184Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-07-08T12:00:31.809194Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 
72075186224037888 is Executed 2025-07-08T12:00:31.809199Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-07-08T12:00:31.809205Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-07-08T12:00:31.809923Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-07-08T12:00:31.809945Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:31.810108Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:603:2519], Recipient [1:603:2519]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-07-08T12:00:31.810119Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-07-08T12:00:31.810126Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:31.810135Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-07-08T12:00:31.810140Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-07-08T12:00:31.810148Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-07-08T12:00:31.810154Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit ... 76715658 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-07-08T12:00:40.581431Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-07-08T12:00:40.581434Z node 8 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715658 2025-07-08T12:00:40.581439Z node 8 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 1001 txid# 281474976715658 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-07-08T12:00:40.581450Z node 8 :TX_DATASHARD DEBUG: Complete [1001 : 281474976715658] from 72075186224037889 at tablet 72075186224037889 send result to client [8:739:2610], exec latency: 0 ms, propose latency: 0 ms 2025-07-08T12:00:40.581560Z node 8 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:40.581579Z node 8 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-07-08T12:00:40.581616Z node 8 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:40.581879Z node 8 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:40.582002Z node 8 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-07-08T12:00:40.582016Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [8:625:2532], Recipient [8:628:2534]: {TEvReadSet step# 1001 txid# 281474976715658 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-07-08T12:00:40.582021Z node 8 
:TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-07-08T12:00:40.582026Z node 8 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715658 2025-07-08T12:00:40.582050Z node 8 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-07-08T12:00:40.582065Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [8:628:2534], Recipient [8:625:2532]: {TEvReadSet step# 1001 txid# 281474976715658 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-07-08T12:00:40.582068Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-07-08T12:00:40.582072Z node 8 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715658 ... validating table 2025-07-08T12:00:40.623847Z node 8 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jzmyhp4h5zxb7khynn5zrqsg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=YzlkNDM2ODItM2RmY2I3Y2UtZGU1MzY1M2EtZWY1NGEyMzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:40.624616Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [8:789:2663], Recipient [8:625:2532]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 2 Columns: 3 Columns: 4 Columns: 1 Snapshot { Step: 1001 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-07-08T12:00:40.624651Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-07-08T12:00:40.624667Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2025-07-08T12:00:40.624686Z node 8 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-07-08T12:00:40.624691Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2025-07-08T12:00:40.624696Z node 8 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-07-08T12:00:40.624700Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2025-07-08T12:00:40.624712Z node 8 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037888 2025-07-08T12:00:40.624718Z node 8 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-07-08T12:00:40.624721Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-07-08T12:00:40.624725Z node 8 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2025-07-08T12:00:40.624729Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2025-07-08T12:00:40.624746Z node 8 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 2 Columns: 3 Columns: 4 Columns: 1 Snapshot { Step: 1001 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-07-08T12:00:40.624788Z node 8 
:TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v1001/18446744073709551615 2025-07-08T12:00:40.624794Z node 8 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[8:789:2663], 0} after executionsCount# 1 2025-07-08T12:00:40.624801Z node 8 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[8:789:2663], 0} sends rowCount# 1, bytes# 64, quota rows left# 1000, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-07-08T12:00:40.624815Z node 8 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[8:789:2663], 0} finished in read 2025-07-08T12:00:40.624824Z node 8 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-07-08T12:00:40.624827Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2025-07-08T12:00:40.624831Z node 8 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-07-08T12:00:40.624835Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-07-08T12:00:40.624844Z node 8 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-07-08T12:00:40.624847Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-07-08T12:00:40.624851Z node 8 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2025-07-08T12:00:40.624855Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-07-08T12:00:40.624873Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-07-08T12:00:40.625098Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [8:789:2663], Recipient [8:625:2532]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-07-08T12:00:40.625110Z node 8 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-07-08T12:00:40.625173Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [8:789:2663], Recipient [8:628:2534]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 2 Columns: 3 Columns: 4 Columns: 1 Snapshot { Step: 1001 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 RangesSize: 1 2025-07-08T12:00:40.625189Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-07-08T12:00:40.625196Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit CheckRead 2025-07-08T12:00:40.625206Z node 8 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-07-08T12:00:40.625210Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit CheckRead 2025-07-08T12:00:40.625213Z node 8 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-07-08T12:00:40.625217Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit BuildAndWaitDependencies 2025-07-08T12:00:40.625224Z node 8 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037889 2025-07-08T12:00:40.625229Z node 8 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-07-08T12:00:40.625232Z node 8 :TX_DATASHARD TRACE: 
Advance execution plan for [0:3] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-07-08T12:00:40.625235Z node 8 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit ExecuteRead 2025-07-08T12:00:40.625253Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit ExecuteRead 2025-07-08T12:00:40.625265Z node 8 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 2 Columns: 3 Columns: 4 Columns: 1 Snapshot { Step: 1001 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 } 2025-07-08T12:00:40.625288Z node 8 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v1001/18446744073709551615 2025-07-08T12:00:40.625293Z node 8 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[8:789:2663], 1} after executionsCount# 1 2025-07-08T12:00:40.625298Z node 8 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[8:789:2663], 1} sends rowCount# 1, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-07-08T12:00:40.625307Z node 8 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[8:789:2663], 1} finished in read 2025-07-08T12:00:40.625313Z node 8 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-07-08T12:00:40.625316Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit ExecuteRead 2025-07-08T12:00:40.625320Z node 8 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit CompletedOperations 2025-07-08T12:00:40.625324Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit CompletedOperations 2025-07-08T12:00:40.625330Z node 8 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-07-08T12:00:40.625333Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit CompletedOperations 2025-07-08T12:00:40.625337Z node 8 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037889 has finished 2025-07-08T12:00:40.625341Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-07-08T12:00:40.625352Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-07-08T12:00:40.625467Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [8:789:2663], Recipient [8:628:2534]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2025-07-08T12:00:40.625474Z node 8 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 1 } { items { int32_value: 1 } items { int32_value: 2 } items { int32_value: 3 } items { int32_value: 4 } }, { items { int32_value: 11 } items { int32_value: 12 } items { int32_value: 12 } items { int32_value: 12 } } |67.7%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} |67.7%| [LD] {RESULT} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut >> DataShardSnapshots::LockedWriteDistributedCommitSuccess-UseSink >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue >> DataShardSnapshots::VolatileSnapshotMerge [GOOD] >> Describe::DescribePartitionPermissions [GOOD] >> DataShardSnapshots::ShardRestartWholeShardLockBasic [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWrites-UseSink [GOOD] >> DataShardWrite::PreparedDistributedWritePageFault [GOOD] >> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart+UseSink >> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate >> DataShardSnapshots::ShardRestartLockUnrelatedUpsert |67.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |67.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |67.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest |67.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |67.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |67.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest |67.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest |67.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |67.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest |67.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/replication/service/ut_topic_reader/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::PreparedDistributedWritePageFault [GOOD] Test command err: 2025-07-08T12:00:31.554803Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001de3/r3tmp/tmpzjmbV7/pdisk_1.dat 2025-07-08T12:00:31.689291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:00:31.709068Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:31.741360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:31.741407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:31.752267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:31.832833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:31.850210Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:594:2513], Recipient [1:603:2519]: NKikimr::TEvTablet::TEvBoot 2025-07-08T12:00:31.850419Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:594:2513], Recipient [1:603:2519]: NKikimr::TEvTablet::TEvRestored 2025-07-08T12:00:31.850509Z node 1 
:TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:603:2519] 2025-07-08T12:00:31.850572Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:31.859119Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:594:2513], Recipient [1:603:2519]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T12:00:31.859293Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:31.859315Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:31.859475Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T12:00:31.859483Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T12:00:31.859490Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T12:00:31.859542Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:31.859556Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:31.859567Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:619:2519] in generation 1 2025-07-08T12:00:31.873176Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:31.877054Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T12:00:31.877117Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:31.877136Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:621:2529] 2025-07-08T12:00:31.877141Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:31.877146Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T12:00:31.877151Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:31.877201Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:603:2519], Recipient [1:603:2519]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-07-08T12:00:31.877208Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-07-08T12:00:31.877297Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T12:00:31.877322Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T12:00:31.877338Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:31.877344Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:31.877351Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-07-08T12:00:31.877356Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-07-08T12:00:31.877360Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-07-08T12:00:31.877365Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:31.877370Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:31.877464Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:610:2523], Recipient 
[1:603:2519]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:31.877471Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:31.877478Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:599:2516], serverId# [1:610:2523], sessionId# [0:0:0] 2025-07-08T12:00:31.877496Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:361:2356], Recipient [1:610:2523] 2025-07-08T12:00:31.877500Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-07-08T12:00:31.877522Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:31.877576Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-07-08T12:00:31.877586Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T12:00:31.877602Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T12:00:31.877617Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-07-08T12:00:31.877622Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-07-08T12:00:31.877627Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-07-08T12:00:31.877631Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-07-08T12:00:31.877673Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-07-08T12:00:31.877677Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-07-08T12:00:31.877681Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-07-08T12:00:31.877684Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-07-08T12:00:31.877693Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-07-08T12:00:31.877696Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-07-08T12:00:31.877700Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-07-08T12:00:31.877703Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-07-08T12:00:31.877709Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-07-08T12:00:31.877922Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:622:2530], Recipient [1:603:2519]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-07-08T12:00:31.877928Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:31.889080Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:31.889104Z node 1 :TX_DATASHARD TRACE: 
Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-07-08T12:00:31.889111Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-07-08T12:00:31.889124Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-07-08T12:00:31.889139Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T12:00:32.050426Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:642:2544], Recipient [1:603:2519]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:32.050451Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:32.050461Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:640:2542], serverId# [1:642:2544], sessionId# [0:0:0] 2025-07-08T12:00:32.050552Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:522:2449], Recipient [1:603:2519]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-07-08T12:00:32.050558Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-07-08T12:00:32.050595Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-07-08T12:00:32.050605Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-07-08T12:00:32.050610Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-07-08T12:00:32.050616Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-07-08T12:00:32.051394Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-07-08T12:00:32.051416Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:32.051589Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:603:2519], Recipient [1:603:2519]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-07-08T12:00:32.051598Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-07-08T12:00:32.051607Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:32.051616Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-07-08T12:00:32.051621Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-07-08T12:00:32.051630Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-07-08T12:00:32.051635Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit ... 
75186224037888 is Executed 2025-07-08T12:00:41.403974Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit WaitForPlan 2025-07-08T12:00:41.403978Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit PlanQueue 2025-07-08T12:00:41.404010Z node 7 :TX_DATASHARD DEBUG: Planned transaction txId 1234567890011 at step 3500 at tablet 72075186224037888 { Transactions { TxId: 1234567890011 AckTo { RawX1: 0 RawX2: 0 } } Step: 3500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-07-08T12:00:41.404014Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:41.404126Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [7:736:2611], Recipient [7:736:2611]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-07-08T12:00:41.404129Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-07-08T12:00:41.404134Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:41.404138Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-07-08T12:00:41.404141Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-07-08T12:00:41.404145Z node 7 :TX_DATASHARD DEBUG: Found ready operation [3500:1234567890011] in PlanQueue unit at 72075186224037888 2025-07-08T12:00:41.404148Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit PlanQueue 2025-07-08T12:00:41.404152Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-07-08T12:00:41.404154Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit PlanQueue 2025-07-08T12:00:41.404157Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit LoadWriteDetails 2025-07-08T12:00:41.404160Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit LoadTxDetails 2025-07-08T12:00:41.404211Z node 7 :TX_DATASHARD TRACE: Parsing write transaction for 1234567890011 at 72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxId: 1234567890011 TxMode: MODE_PREPARE Locks { Op: Commit } 2025-07-08T12:00:41.404225Z node 7 :TX_DATASHARD TRACE: Table /Root/table, shard: 72075186224037888, write point (Int32 : 1) 2025-07-08T12:00:41.404229Z node 7 :TX_DATASHARD TRACE: -- AddWriteRange: (Int32 : 1) table: [72057594046644480:2:1] 2025-07-08T12:00:41.404239Z node 7 :TX_DATASHARD DEBUG: LoadWriteDetails at 72075186224037888 loaded writeOp from db 3500:1234567890011 keys extracted: 1 2025-07-08T12:00:41.404242Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-07-08T12:00:41.404244Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit LoadWriteDetails 2025-07-08T12:00:41.404246Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-07-08T12:00:41.404249Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit 
BuildAndWaitDependencies 2025-07-08T12:00:41.404258Z node 7 :TX_DATASHARD TRACE: Operation [3500:1234567890011] is the new logically complete end at 72075186224037888 2025-07-08T12:00:41.404261Z node 7 :TX_DATASHARD TRACE: Operation [3500:1234567890011] is the new logically incomplete end at 72075186224037888 2025-07-08T12:00:41.404263Z node 7 :TX_DATASHARD TRACE: Activated operation [3500:1234567890011] at 72075186224037888 2025-07-08T12:00:41.404266Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-07-08T12:00:41.404268Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-07-08T12:00:41.404271Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit BuildWriteOutRS 2025-07-08T12:00:41.404273Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit BuildWriteOutRS 2025-07-08T12:00:41.404281Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-07-08T12:00:41.404283Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit BuildWriteOutRS 2025-07-08T12:00:41.404285Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit StoreAndSendWriteOutRS 2025-07-08T12:00:41.404287Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit StoreAndSendWriteOutRS 2025-07-08T12:00:41.404290Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-07-08T12:00:41.404292Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit StoreAndSendWriteOutRS 2025-07-08T12:00:41.404294Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit PrepareWriteTxInRS 2025-07-08T12:00:41.404296Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit PrepareWriteTxInRS 2025-07-08T12:00:41.404300Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-07-08T12:00:41.404302Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit PrepareWriteTxInRS 2025-07-08T12:00:41.404303Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit LoadAndWaitInRS 2025-07-08T12:00:41.404306Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit LoadAndWaitInRS 2025-07-08T12:00:41.404308Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-07-08T12:00:41.404310Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit LoadAndWaitInRS 2025-07-08T12:00:41.404312Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit ExecuteWrite 2025-07-08T12:00:41.404314Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit ExecuteWrite 2025-07-08T12:00:41.404318Z node 7 :TX_DATASHARD DEBUG: Executing write operation for [3500:1234567890011] at 72075186224037888 2025-07-08T12:00:41.404397Z node 7 :TX_DATASHARD TRACE: Tablet 72075186224037888 is not ready for [3500:1234567890011] execution 
2025-07-08T12:00:41.404406Z node 7 :TX_DATASHARD DEBUG: tx 1234567890011 at 72075186224037888 released its data 2025-07-08T12:00:41.404411Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Restart 2025-07-08T12:00:41.404413Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-07-08T12:00:41.404415Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-07-08T12:00:41.404417Z node 7 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-07-08T12:00:41.404419Z node 7 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-07-08T12:00:41.404469Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:41.404472Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit ExecuteWrite 2025-07-08T12:00:41.404474Z node 7 :TX_DATASHARD DEBUG: Executing write operation for [3500:1234567890011] at 72075186224037888 2025-07-08T12:00:41.404493Z node 7 :TX_DATASHARD TRACE: Parsing write transaction for 1234567890011 at 72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxId: 1234567890011 TxMode: MODE_PREPARE Locks { Op: Commit } 2025-07-08T12:00:41.404497Z node 7 :TX_DATASHARD TRACE: Table /Root/table, shard: 72075186224037888, write point (Int32 : 1) 2025-07-08T12:00:41.404500Z node 7 :TX_DATASHARD TRACE: -- AddWriteRange: (Int32 : 1) table: [72057594046644480:2:1] 2025-07-08T12:00:41.404505Z node 7 :TX_DATASHARD DEBUG: tx 1234567890011 at 72075186224037888 restored its data 2025-07-08T12:00:41.404521Z node 7 :TX_DATASHARD DEBUG: Executed write operation for [3500:1234567890011] at 72075186224037888, row count=1 2025-07-08T12:00:41.404528Z node 7 :TX_DATASHARD TRACE: Lock 1234567890001 marked broken at v{min} 2025-07-08T12:00:41.404537Z node 7 :TX_DATASHARD TRACE: add locks to result: 0 2025-07-08T12:00:41.404544Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is ExecutedNoMoreRestarts 2025-07-08T12:00:41.404547Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit ExecuteWrite 2025-07-08T12:00:41.404549Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit CompleteWrite 2025-07-08T12:00:41.404551Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit CompleteWrite 2025-07-08T12:00:41.404577Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is DelayComplete 2025-07-08T12:00:41.404580Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit CompleteWrite 2025-07-08T12:00:41.404582Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit CompletedOperations 2025-07-08T12:00:41.404584Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit CompletedOperations 2025-07-08T12:00:41.404587Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-07-08T12:00:41.404589Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 
72075186224037888 executing on unit CompletedOperations 2025-07-08T12:00:41.404591Z node 7 :TX_DATASHARD TRACE: Execution plan for [3500:1234567890011] at 72075186224037888 has finished 2025-07-08T12:00:41.404593Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:41.404597Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-07-08T12:00:41.404599Z node 7 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-07-08T12:00:41.404600Z node 7 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-07-08T12:00:41.404657Z node 7 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2025-07-08T12:00:41.404702Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:41.404705Z node 7 :TX_DATASHARD TRACE: Complete execution for [3500:1234567890011] at 72075186224037888 on unit CompleteWrite 2025-07-08T12:00:41.404713Z node 7 :TX_DATASHARD DEBUG: Complete write [3500 : 1234567890011] from 72075186224037888 at tablet 72075186224037888 send result to client [7:729:2605] 2025-07-08T12:00:41.404718Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD] Test command err: 2025-07-08T12:00:31.743384Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001db8/r3tmp/tmpARCVT1/pdisk_1.dat 2025-07-08T12:00:31.869462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:00:31.886842Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:31.921343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:31.921380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:31.931887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:32.004778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:32.021146Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:594:2513], Recipient [1:603:2519]: NKikimr::TEvTablet::TEvBoot 2025-07-08T12:00:32.021320Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:594:2513], Recipient [1:603:2519]: NKikimr::TEvTablet::TEvRestored 2025-07-08T12:00:32.021387Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:603:2519] 2025-07-08T12:00:32.021456Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:32.028582Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:594:2513], Recipient [1:603:2519]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T12:00:32.028734Z node 1 :TX_DATASHARD DEBUG: 
TxInitSchema.Complete 2025-07-08T12:00:32.028750Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:32.028880Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T12:00:32.028888Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T12:00:32.028895Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T12:00:32.028944Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:32.028987Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:32.029000Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:619:2519] in generation 1 2025-07-08T12:00:32.043021Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:32.047357Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T12:00:32.047441Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:32.047466Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:621:2529] 2025-07-08T12:00:32.047472Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:32.047478Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T12:00:32.047483Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:32.047546Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:603:2519], Recipient [1:603:2519]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-07-08T12:00:32.047553Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-07-08T12:00:32.047647Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T12:00:32.047670Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T12:00:32.047685Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:32.047691Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:32.047698Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-07-08T12:00:32.047703Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-07-08T12:00:32.047707Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-07-08T12:00:32.047711Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:32.047716Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:32.047804Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:610:2523], Recipient [1:603:2519]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:32.047813Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:32.047820Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:599:2516], serverId# [1:610:2523], sessionId# [0:0:0] 2025-07-08T12:00:32.047836Z node 1 :TX_DATASHARD TRACE: StateWork, 
received event# 269549568, Sender [1:361:2356], Recipient [1:610:2523] 2025-07-08T12:00:32.047840Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-07-08T12:00:32.047859Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:32.047901Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-07-08T12:00:32.047909Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T12:00:32.047924Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T12:00:32.047940Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-07-08T12:00:32.047944Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-07-08T12:00:32.047949Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-07-08T12:00:32.047953Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-07-08T12:00:32.047991Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-07-08T12:00:32.047995Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-07-08T12:00:32.047999Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-07-08T12:00:32.048003Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-07-08T12:00:32.048014Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-07-08T12:00:32.048018Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-07-08T12:00:32.048021Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-07-08T12:00:32.048025Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-07-08T12:00:32.048030Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-07-08T12:00:32.048268Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:622:2530], Recipient [1:603:2519]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-07-08T12:00:32.048279Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:32.058576Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:32.058606Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-07-08T12:00:32.058613Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-07-08T12:00:32.058624Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, 
status: PREPARED 2025-07-08T12:00:32.058640Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T12:00:32.211424Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:642:2544], Recipient [1:603:2519]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:32.211446Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:32.211454Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:640:2542], serverId# [1:642:2544], sessionId# [0:0:0] 2025-07-08T12:00:32.211537Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:522:2449], Recipient [1:603:2519]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-07-08T12:00:32.211542Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-07-08T12:00:32.211569Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-07-08T12:00:32.211578Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-07-08T12:00:32.211582Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-07-08T12:00:32.211587Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-07-08T12:00:32.212421Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-07-08T12:00:32.212438Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:32.212570Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:603:2519], Recipient [1:603:2519]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-07-08T12:00:32.212577Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-07-08T12:00:32.212584Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:32.212592Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-07-08T12:00:32.212597Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-07-08T12:00:32.212606Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-07-08T12:00:32.212611Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit ... 
594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-07-08T12:00:41.537964Z node 7 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[7:915:2756], 1001} after executionsCount# 1 2025-07-08T12:00:41.537967Z node 7 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[7:915:2756], 1001} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-07-08T12:00:41.537976Z node 7 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[7:915:2756], 1001} finished in read 2025-07-08T12:00:41.537980Z node 7 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-07-08T12:00:41.537982Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit ExecuteRead 2025-07-08T12:00:41.537984Z node 7 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit CompletedOperations 2025-07-08T12:00:41.537986Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit CompletedOperations 2025-07-08T12:00:41.537990Z node 7 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-07-08T12:00:41.537992Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit CompletedOperations 2025-07-08T12:00:41.537994Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037889 has finished 2025-07-08T12:00:41.537996Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-07-08T12:00:41.538004Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-07-08T12:00:41.538100Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:921:2762], Recipient [7:646:2543]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:41.538105Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:41.538109Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [7:920:2761], serverId# [7:921:2762], sessionId# [0:0:0] 2025-07-08T12:00:41.538124Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [7:919:2760], Recipient [7:646:2543]: NKikimrTxDataShard.TEvGetInfoRequest 2025-07-08T12:00:41.538194Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:924:2765], Recipient [7:646:2543]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:41.538197Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:41.538200Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [7:923:2764], serverId# [7:924:2765], sessionId# [0:0:0] 2025-07-08T12:00:41.538215Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:922:2763], Recipient [7:646:2543]: NKikimrTxDataShard.TEvRead ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-07-08T12:00:41.538223Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2025-07-08T12:00:41.538227Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037890 CompleteEdge# v1001/1000001 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} 
ImmediateWriteEdgeReplied# v{min} 2025-07-08T12:00:41.538230Z node 7 :TX_DATASHARD TRACE: 72075186224037890 changed HEAD read to non-repeatable v4000/18446744073709551615 2025-07-08T12:00:41.538235Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit CheckRead 2025-07-08T12:00:41.538241Z node 7 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2025-07-08T12:00:41.538243Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit CheckRead 2025-07-08T12:00:41.538246Z node 7 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037890 to execution unit BuildAndWaitDependencies 2025-07-08T12:00:41.538248Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit BuildAndWaitDependencies 2025-07-08T12:00:41.538253Z node 7 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037890 2025-07-08T12:00:41.538257Z node 7 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2025-07-08T12:00:41.538259Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit BuildAndWaitDependencies 2025-07-08T12:00:41.538261Z node 7 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037890 to execution unit ExecuteRead 2025-07-08T12:00:41.538263Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit ExecuteRead 2025-07-08T12:00:41.538269Z node 7 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 1, request: { ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-07-08T12:00:41.538283Z node 7 :TX_DATASHARD TRACE: 72075186224037890 Complete read# {[7:922:2763], 1002} after executionsCount# 1 2025-07-08T12:00:41.538287Z node 7 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[7:922:2763], 1002} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-07-08T12:00:41.538301Z node 7 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[7:922:2763], 1002} finished in read 2025-07-08T12:00:41.538305Z node 7 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2025-07-08T12:00:41.538307Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit ExecuteRead 2025-07-08T12:00:41.538309Z node 7 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037890 to execution unit CompletedOperations 2025-07-08T12:00:41.538313Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit CompletedOperations 2025-07-08T12:00:41.538319Z node 7 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2025-07-08T12:00:41.538322Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit CompletedOperations 2025-07-08T12:00:41.538326Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037890 has finished 2025-07-08T12:00:41.538329Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2025-07-08T12:00:41.538339Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2025-07-08T12:00:41.538431Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:928:2769], Recipient [7:643:2541]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:41.538435Z node 7 
:TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:41.538438Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [7:927:2768], serverId# [7:928:2769], sessionId# [0:0:0] 2025-07-08T12:00:41.538445Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [7:926:2767], Recipient [7:643:2541]: NKikimrTxDataShard.TEvGetInfoRequest 2025-07-08T12:00:41.538502Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:931:2772], Recipient [7:643:2541]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:41.538506Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:41.538509Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [7:930:2771], serverId# [7:931:2772], sessionId# [0:0:0] 2025-07-08T12:00:41.538522Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:929:2770], Recipient [7:643:2541]: NKikimrTxDataShard.TEvRead ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-07-08T12:00:41.538529Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2025-07-08T12:00:41.538532Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037891 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-07-08T12:00:41.538534Z node 7 :TX_DATASHARD TRACE: 72075186224037891 changed HEAD read to non-repeatable v4000/18446744073709551615 2025-07-08T12:00:41.538538Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit CheckRead 2025-07-08T12:00:41.538543Z node 7 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2025-07-08T12:00:41.538547Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit CheckRead 2025-07-08T12:00:41.538551Z node 7 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037891 to execution unit BuildAndWaitDependencies 2025-07-08T12:00:41.538555Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit BuildAndWaitDependencies 2025-07-08T12:00:41.538561Z node 7 :TX_DATASHARD TRACE: Activated operation [0:2] at 72075186224037891 2025-07-08T12:00:41.538566Z node 7 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2025-07-08T12:00:41.538569Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit BuildAndWaitDependencies 2025-07-08T12:00:41.538572Z node 7 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037891 to execution unit ExecuteRead 2025-07-08T12:00:41.538576Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit ExecuteRead 2025-07-08T12:00:41.538585Z node 7 :TX_DATASHARD TRACE: 72075186224037891 Execute read# 1, request: { ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-07-08T12:00:41.538597Z node 7 :TX_DATASHARD TRACE: 72075186224037891 Complete read# {[7:929:2770], 1003} after executionsCount# 1 2025-07-08T12:00:41.538601Z node 7 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[7:929:2770], 1003} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 
18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-07-08T12:00:41.538606Z node 7 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[7:929:2770], 1003} finished in read 2025-07-08T12:00:41.538612Z node 7 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2025-07-08T12:00:41.538615Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit ExecuteRead 2025-07-08T12:00:41.538619Z node 7 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037891 to execution unit CompletedOperations 2025-07-08T12:00:41.538622Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit CompletedOperations 2025-07-08T12:00:41.538628Z node 7 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2025-07-08T12:00:41.538631Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit CompletedOperations 2025-07-08T12:00:41.538635Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:2] at 72075186224037891 has finished 2025-07-08T12:00:41.538640Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2025-07-08T12:00:41.538650Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 >> DataShardSnapshots::LockedWriteDistributedCommitAborted-UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockUnrelatedUpsert [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString [GOOD] >> DataShardSnapshots::ShardRestartWholeShardLockBrokenByUpsert >> DataShardSnapshots::LockedWriteDistributedCommitSuccess-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict+UseSink >> DataShardSnapshots::LockedWriteDistributedCommitFreeze+UseSink >> DataShardSnapshots::MvccSnapshotLockedWritesRestart+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart-UseSink >> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate [GOOD] >> DataShardSnapshots::VolatileSnapshotReadTable |67.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |67.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |67.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |67.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |67.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |67.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... results_accumulator.log} |67.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |67.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataShardSnapshots::ShardRestartWholeShardLockBrokenByUpsert [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead+UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString [GOOD] Test command err: 2025-07-08T11:58:53.506443Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:58:53.508883Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:58:53.508922Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:58:53.509402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:58:53.509444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:58:53.509472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-07-08T11:58:53.509486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:58:53.509498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:58:53.509510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:58:53.509521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:58:53.509532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:58:53.509542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:58:53.509553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.509564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:58:53.509576Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:58:53.514759Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:58:53.514972Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:58:53.515002Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:58:53.515045Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:53.515100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:58:53.515113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:58:53.515120Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:58:53.515130Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:58:53.515139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:58:53.515147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:58:53.515152Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:58:53.515172Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:58:53.515180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:58:53.515190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:58:53.515194Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:58:53.515204Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:58:53.515211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:58:53.515219Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:58:53.515223Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:58:53.515232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:58:53.515240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:58:53.515245Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:58:53.515272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:58:53.515281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:58:53.515285Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:58:53.515307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:58:53.515315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:58:53.515319Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:58:53.515334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:58:53.515341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.515346Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:58:53.515354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:58:53.515361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:58:53.515369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:58:53.515374Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:58:53.515417Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=11; 2025-07-08T11:58:53.515433Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=13; 2025-07-08T11:58:53.515443Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=5; 2025-07-08T11:58:53.515454Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=7; 2025-07-08T11:58:53.515465Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:58:53.515481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:58:53.515491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:58:53.515497Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:58:53.515512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:58:53.515518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;e ... 
um=70548;count=254;size_of_portion=184; 2025-07-08T12:00:42.438092Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:48;memory_size=110;data_size=85;sum=27986;count=509; 2025-07-08T12:00:42.438098Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:65;memory_size=206;data_size=197;sum=52466;count=510;size_of_meta=112; 2025-07-08T12:00:42.438103Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=269;sum=70826;count=255;size_of_portion=184; 2025-07-08T12:00:42.438122Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=1583; 2025-07-08T12:00:42.438129Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=1; 2025-07-08T12:00:42.438207Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=71; 2025-07-08T12:00:42.438212Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=1692; 2025-07-08T12:00:42.438219Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=1709; 2025-07-08T12:00:42.438225Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=1; 2025-07-08T12:00:42.438250Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=21; 2025-07-08T12:00:42.438255Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=1799; 2025-07-08T12:00:42.438277Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=16; 2025-07-08T12:00:42.438293Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=11; 2025-07-08T12:00:42.438314Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=16; 2025-07-08T12:00:42.438332Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=12; 2025-07-08T12:00:42.438696Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=359; 2025-07-08T12:00:42.439090Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=385; 2025-07-08T12:00:42.439098Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=0; 2025-07-08T12:00:42.439103Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2025-07-08T12:00:42.439108Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=0; 2025-07-08T12:00:42.439120Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=8; 2025-07-08T12:00:42.439125Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-07-08T12:00:42.439138Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=8; 2025-07-08T12:00:42.439143Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=0; 2025-07-08T12:00:42.439154Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=5; 2025-07-08T12:00:42.439164Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=5; 2025-07-08T12:00:42.439177Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=8; 2025-07-08T12:00:42.439182Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=3746; 2025-07-08T12:00:42.439216Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted 
{blob_bytes=119665952;raw_bytes=192854450;count=5;records=1855000} inactive {blob_bytes=632703072;raw_bytes=989320282;count=54;records=9818750} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-07-08T12:00:42.439237Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:3127:5089];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SwitchToWork; 2025-07-08T12:00:42.439245Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:3127:5089];process=SwitchToWork;fline=columnshard.cpp:76;event=initialize_shard;step=SignalTabletActive; 2025-07-08T12:00:42.439256Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:3127:5089];process=SwitchToWork;fline=columnshard_impl.cpp:1327;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-07-08T12:00:42.439263Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:3127:5089];process=SwitchToWork;fline=column_engine_logs.cpp:471;event=OnTieringModified;new_count_tierings=0; 2025-07-08T12:00:42.439289Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T12:00:42.439306Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=23; 2025-07-08T12:00:42.439320Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975636611;tx_id=18446744073709551615;;current_snapshot_ts=1751975934533; 2025-07-08T12:00:42.439329Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=23;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T12:00:42.439339Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T12:00:42.439343Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T12:00:42.439362Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; 2025-07-08T12:00:42.440360Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:3127:5089];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:248;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-07-08T12:00:42.440428Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:3127:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:237;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-07-08T12:00:42.440434Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-07-08T12:00:42.440437Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-07-08T12:00:42.440442Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:3127:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T12:00:42.440456Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:3127:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=23; 2025-07-08T12:00:42.440466Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:3127:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975636611;tx_id=18446744073709551615;;current_snapshot_ts=1751975934533; 2025-07-08T12:00:42.440473Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:3127:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=23;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T12:00:42.440480Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:3127:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T12:00:42.440485Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:3127:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T12:00:42.440496Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:3127:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.999000s; 2025-07-08T12:00:42.440503Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:3127:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; |67.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |67.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |67.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> Secret::Validation |67.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |67.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> Secret::SimpleQueryService >> TOlapReboots::CreateDropTable [GOOD] >> TOlapReboots::CreateDropStore |67.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |67.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |67.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |67.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |67.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |67.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |67.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |67.9%| [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> DataShardSnapshots::MvccSnapshotLockedWritesRestart-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts+UseSink >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead-UseSink >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict+UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit+UseSink >> DataShardSnapshots::VolatileSnapshotReadTable [GOOD] >> DataShardSnapshots::VolatileSnapshotRefreshDiscard |67.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> Secret::DeactivatedQueryService >> DataShardSnapshots::LockedWriteDistributedCommitFreeze+UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze-UseSink |67.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx3 [GOOD] >> TOlapReboots::CreateTable [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/topic/ut/unittest >> Describe::DescribePartitionPermissions [GOOD] Test command err: 2025-07-08T11:58:17.194104Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679183029367962:2145];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:17.194225Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b35/r3tmp/tmpwVrz4a/pdisk_1.dat 2025-07-08T11:58:17.257322Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T11:58:17.278979Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:58:17.291290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:17.291318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:17.291689Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63422, node 1 2025-07-08T11:58:17.305362Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/43nv/001b35/r3tmp/yandexsfTpIH.tmp 2025-07-08T11:58:17.305378Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/43nv/001b35/r3tmp/yandexsfTpIH.tmp 2025-07-08T11:58:17.305432Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/43nv/001b35/r3tmp/yandexsfTpIH.tmp 2025-07-08T11:58:17.305464Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T11:58:17.307863Z INFO: TTestServer started on Port 28173 GrpcPort 63422 TClient is connected to server localhost:28173 PQClient connected to localhost:63422 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:58:17.354138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:17.357138Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:17.369294Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-07-08T11:58:17.370673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-07-08T11:58:17.593000Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7524679183029368552:2290], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T11:58:17.593107Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWQ5ZmE4OWEtNDYzYmEzMzEtY2EzZjZjZWYtYTY1Mjg5MTE=, ActorId: [1:7524679183029368550:2289], ActorState: ExecuteState, TraceId: 01jzmydaes8sksgpnfrz2yfzve, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T11:58:17.593951Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2025-07-08T11:58:17.595231Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T11:58:17.793511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:58:17.816367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:58:17.845529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7524679183029368858:2563] 2025-07-08T11:58:18.197179Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:58:22.197132Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7524679183029367962:2145];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:22.197686Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-07-08T11:58:23.953040Z :ConnectToYDB INFO: TTopicSdkTestSetup started 2025-07-08T11:58:23.977616Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-07-08T11:58:23.990111Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-07-08T11:58:23.990547Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-07-08T11:58:23.990600Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-07-08T11:58:23.990605Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-07-08T11:58:23.990615Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2025-07-08T11:58:24.032529Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7524679208799172971:2750], now have 1 active actors on pipe 2025-07-08T11:58:24.032678Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-07-08T11:58:24.032753Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7524679208799172972:2751] connected; active server actors: 1 2025-07-08T11:58:24.032786Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2025-07-08T11:58:24.033288Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-07-08T11:58:24.033327Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2025-07-08T11:58:24.033640Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-07-08T11:58:24.034451Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2025-07-08T11:58:24.034455Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2025-07-08T11:58:24.034464Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7524679213094140327:2412], now have 1 active actors on pipe 2025-07-08T11:58:24.034477Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7524679213094140336:2787], now have 1 active actors on pipe 2025-07-08T11:58:24.034521Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-07-08T11:58:24.034739Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-07-08T11:58:24.034937Z node 1 :PERSQUEUE DEBUG: [test-topic:0:Initializer] Start initializing step TInitConfigStep 2025-07-08T11:58:24.034949Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateInit] HandleOnInit TEvPQ::TEvProposePartitionConfig 2025-07-08T11:58:24.035017Z node 1 :PERSQUEUE DEBUG: [test-topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-07-08T11:58:24.035170Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] bootstrapping 0 [1:7524679213094140343:2416] 2025-07-08T11:58:24.035419Z node 1 :PERSQUEUE DEBUG: [test-topic:0:Initializer] Initializing completed. 
2025-07-08T11:58:24.035423Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] init complete for topic 'test-topic' partition 0 generation 1 [1:7524679213094140343:2416] 2025-07-08T11:58:24.035430Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateInit] SYNC INIT topic test-topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-07-08T11:58:24.035531Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Process pending events. Count 1 2025-07-08T11:58:24.035560Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 user test-consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-07-08T11:58:24.035634Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-07-08T11:58:24.036299Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-07-08T11:58:24.039073Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1751975904077, TxId 281474976715672 2025-07-08T11:58:24.039153Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-07-08T11:58:24.040299Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupport ... otify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /Root/test-topic PathId: [OwnerId: 72057594046644480, LocalPathId: 9] DescribeSchemeResult: Status: StatusSuccess Path: "/Root/test-topic" PathDescription { Self { Name: "test-topic" PathId: 9 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976715671 CreateStep: 1751976041340 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\031\010\001\020\200\004\032\020x-user-1@builtin \003\n\030\010\001\020\002\032\020x-user-2@builtin \003\n\031\010\001\020\200\004\032\020x-user-3@builtin \003\n\030\010\001\020\002\032\020x-user-3@builtin \003\n\031\010\001\020\200\004\032\020x-user-5@builtin \003\n\030\010\001\020\002\032\020x-user-6@builtin \003\n\031\010\001\020\200\004\032\020x-user-7@builtin \003\n\030\010\001\020\002\032\020x-user-7@builtin \003" EffectiveACL: "\n\034\010\001\020\200\200\002\032\020x-user-0@builtin \003(\001\n\034\010\001\020\200\200\002\032\020x-user-1@builtin \003(\001\n\034\010\001\020\200\200\002\032\020x-user-2@builtin \003(\001\n\034\010\001\020\200\200\002\032\020x-user-3@builtin \003(\001\n\034\010\001\020\200\200\002\032\020x-user-4@builtin \003(\001\n\034\010\001\020\200\200\002\032\020x-user-5@builtin \003(\001\n\034\010\001\020\200\200\002\032\020x-user-6@builtin \003(\001\n\034\010\001\020\200\200\002\032\020x-user-7@builtin \003(\001\n\031\010\001\020\200\004\032\020x-user-1@builtin \003\n\030\010\001\020\002\032\020x-user-2@builtin \003\n\031\010\001\020\200\004\032\020x-user-3@builtin \003\n\030\010\001\020\002\032\020x-user-3@builtin \003\n\031\010\001\020\200\004\032\020x-user-5@builtin \003\n\030\010\001\020\002\032\020x-user-6@builtin \003\n\031\010\001\020\200\004\032\020x-user-7@builtin \003\n\030\010\001\020\002\032\020x-user-7@builtin \003" PathVersion: 16 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 16 ACLVersion: 6 EffectiveACLVersion: 14 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } 
ChildrenExist: false BalancerTabletID: 72075186224037893 } PersQueueGroup { Name: "test-topic" PathId: 9 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 3600 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } YdbDatabasePath: "/Root" Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } Partitions { PartitionId: 0 TabletId: 72075186224037892 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037893 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 9 PathOwnerId: 72057594046644480 }, by path# { Subscriber: { Subscriber: [15:7524679799755147401:2415] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 4 TableKind: 0 Created: 1 CreateStep: 1751976041340 PathId: [OwnerId: 72057594046644480, LocalPathId: 9] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# { Subscriber: { Subscriber: [15:7524679799755147401:2415] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 4 TableKind: 0 Created: 1 CreateStep: 1751976041340 PathId: [OwnerId: 72057594046644480, LocalPathId: 9] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 } 2025-07-08T12:00:41.453408Z node 15 :PQ_READ_PROXY DEBUG: new Describe partition request 2025-07-08T12:00:41.453436Z node 15 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request operation_params { } path: "test-topic" include_location: true 2025-07-08T12:00:41.453446Z node 15 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[15:7524679799755147662:2450]: Bootstrap 2025-07-08T12:00:41.453674Z node 15 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [15:7524679773985342282:2112], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/test-topic TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-07-08T12:00:41.453698Z node 15 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [main][15:7524679799755147435:2801][/Root/test-topic] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [15:7524679773985342282:2112], cookie# 13 2025-07-08T12:00:41.453708Z node 15 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][15:7524679799755147439:2801][/Root/test-topic] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /Root/test-topic }: sender# [15:7524679799755147436:2801], cookie# 13 2025-07-08T12:00:41.453712Z node 15 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][15:7524679799755147440:2801][/Root/test-topic] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /Root/test-topic }: sender# [15:7524679799755147437:2801], cookie# 13 2025-07-08T12:00:41.453716Z node 15 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][15:7524679799755147441:2801][/Root/test-topic] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /Root/test-topic }: sender# [15:7524679799755147438:2801], cookie# 13 2025-07-08T12:00:41.453727Z node 15 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][15:7524679799755147439:2801][/Root/test-topic] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 16 Partial: 0 }: sender# [15:7524679773985342176:2049], cookie# 13 2025-07-08T12:00:41.453729Z node 15 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][15:7524679799755147440:2801][/Root/test-topic] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 16 Partial: 0 }: sender# [15:7524679773985342179:2052], cookie# 13 2025-07-08T12:00:41.453732Z node 15 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][15:7524679799755147441:2801][/Root/test-topic] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 16 Partial: 0 }: sender# [15:7524679773985342182:2055], cookie# 13 2025-07-08T12:00:41.453738Z node 15 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][15:7524679799755147435:2801][/Root/test-topic] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 16 Partial: 0 }: sender# [15:7524679799755147436:2801], cookie# 13 2025-07-08T12:00:41.453743Z node 15 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][15:7524679799755147435:2801][/Root/test-topic] Sync is in progress: cookie# 13, size# 3, half# 1, successes# 1, faulires# 0 2025-07-08T12:00:41.453747Z node 15 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][15:7524679799755147435:2801][/Root/test-topic] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 16 Partial: 0 }: sender# [15:7524679799755147437:2801], cookie# 13 2025-07-08T12:00:41.453750Z node 15 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][15:7524679799755147435:2801][/Root/test-topic] Sync is done: cookie# 13, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-07-08T12:00:41.453757Z node 15 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][15:7524679799755147435:2801][/Root/test-topic] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 16 Partial: 0 }: sender# [15:7524679799755147438:2801], cookie# 13 2025-07-08T12:00:41.453760Z node 15 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][15:7524679799755147435:2801][/Root/test-topic] Unexpected sync response: sender# [15:7524679799755147438:2801], cookie# 13 2025-07-08T12:00:41.453766Z node 15 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [15:7524679773985342282:2112], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root/test-topic PathId: Partial: 0 } 2025-07-08T12:00:41.453776Z node 15 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [15:7524679773985342282:2112], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root/test-topic PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: 
[15:7524679799755147435:2801] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 13 } Filled: 1 Status: StatusSuccess Kind: 4 TableKind: 0 Created: 1 CreateStep: 1751976041340 PathId: [OwnerId: 72057594046644480, LocalPathId: 9] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-07-08T12:00:41.453787Z node 15 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [15:7524679773985342282:2112], cacheItem# { Subscriber: { Subscriber: [15:7524679799755147435:2801] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 13 } Filled: 1 Status: StatusSuccess Kind: 4 TableKind: 0 Created: 1 CreateStep: 1751976041340 PathId: [OwnerId: 72057594046644480, LocalPathId: 9] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/test-topic TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 13 IsSync: true Partial: 0 } 2025-07-08T12:00:41.453824Z node 15 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [15:7524679799755147663:2963], recipient# [15:7524679799755147662:2450], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/test-topic TableId: [72057594046644480:9:0] RequestType: ByPath Operation: OpList RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Ok Kind: KindTopic DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-07-08T12:00:41.453867Z node 15 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [15:7524679799755147662:2450]: Request location 2025-07-08T12:00:41.454047Z node 15 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [15:7524679799755147664:2451] connected; active server actors: 1 2025-07-08T12:00:41.454125Z node 15 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 15, Generation 1 2025-07-08T12:00:41.454131Z node 15 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [15:7524679799755147662:2450]: Got location 2025-07-08T12:00:41.454505Z node 15 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [15:7524679799755147664:2451] disconnected; active server actors: 1 2025-07-08T12:00:41.454508Z node 15 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [15:7524679799755147664:2451] disconnected no session === status=SUCCESS issues= |67.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest |67.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |67.9%| [LD] {RESULT} $(B)/ydb/core/mind/ut/ydb-core-mind-ut >> Secret::Simple >> Secret::Deactivated |67.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |67.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |67.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest |67.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |67.9%| [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |67.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead-UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead+UseSink |67.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit [GOOD] |67.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |67.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx3 [GOOD] Test command err: iteration# 3 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 9 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 15 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 21 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 27 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 33 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 39 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 45 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 51 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 57 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 63 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 69 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 75 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 81 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 87 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 93 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 99 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 105 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 111 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 117 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 123 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 129 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 135 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 141 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 147 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 153 BlobsWritten# 2041 blobsWrittenFull# 157 
blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 159 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 165 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 171 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 177 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 183 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 189 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 195 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 201 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 207 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 213 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 219 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 225 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 231 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 237 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 243 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 249 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 255 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 261 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 267 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 273 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 279 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 285 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 291 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 297 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 303 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 309 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 315 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 321 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 327 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 333 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 339 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 345 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 351 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 357 BlobsWritten# 2041 
blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 363 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 369 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 375 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 381 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 387 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 393 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 399 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 405 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 411 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 417 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 423 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 429 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 435 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 441 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 447 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 453 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 459 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 465 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 471 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 477 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 483 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 489 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 495 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 501 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 507 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 513 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 519 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 525 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 531 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 537 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 543 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 549 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 555 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 561 
BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 567 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 573 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 579 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 585 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 591 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 597 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 603 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 609 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 615 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 621 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 627 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 633 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 639 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 645 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 651 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 657 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 663 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 669 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 675 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 681 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 687 BlobsWritten# 2041 blobsWrittenFul ... 
blobsUnwritten# 1218 iteration# 1365 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1371 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1377 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1383 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1389 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1395 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1401 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1407 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1413 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1419 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1425 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1431 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1437 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1443 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1449 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1455 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1461 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1467 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1473 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1479 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1485 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1491 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1497 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1503 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1509 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1515 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1521 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1527 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1533 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1539 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1545 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1551 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1557 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1563 BlobsWritten# 2041 
blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1569 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1575 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1581 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1587 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1593 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1599 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1605 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1611 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1617 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1623 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1629 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1635 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1641 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1647 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1653 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1659 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1665 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1671 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1677 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1683 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1689 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1695 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1701 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1707 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1713 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1719 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1725 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1731 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1737 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1743 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1749 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1755 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1761 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 
blobsUnwritten# 1218 iteration# 1767 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1773 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1779 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1785 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1791 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1797 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1803 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1809 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1815 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1821 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1827 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1833 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1839 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1845 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1851 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1857 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1863 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1869 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1875 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1881 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1887 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1893 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1899 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1905 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1911 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1917 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1923 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1929 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1935 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1941 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1947 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1953 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1959 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1965 BlobsWritten# 2041 
blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1971 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1977 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1983 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1989 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1995 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2001 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2007 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2013 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2019 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2025 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2031 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2037 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 |67.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut >> TOlapReboots::DropTableThenStore [GOOD] |67.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::CreateTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:127:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:132:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:137:2058] recipient: [1:111:2143] 2025-07-08T11:59:55.485345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:59:55.485367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:55.485373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:59:55.485377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:59:55.485382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type 
TxMergeTablePartition, limit 10000 2025-07-08T11:59:55.485386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:59:55.485394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:55.485411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:59:55.485483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:55.519091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T11:59:55.519121Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:177:2058] recipient: [1:15:2062] 2025-07-08T11:59:55.528497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:55.528536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:59:55.528569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:59:55.529922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:59:55.530072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:59:55.530166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:55.530218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:59:55.531068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:55.531105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:59:55.531297Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:55.531304Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:55.531320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:59:55.531327Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:55.531332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:59:55.531365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-07-08T11:59:55.537870Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T11:59:55.554985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: 
ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:55.555043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.555092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:59:55.555128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:59:55.555137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.555792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:55.555812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:59:55.555849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.555857Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:59:55.555861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:59:55.555866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:59:55.556188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.556196Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:55.556200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:59:55.556501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.556507Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.556512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:55.556518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:59:55.557054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:59:55.557379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:59:55.557412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 
72057594046316545 is [1:133:2156] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:59:55.557572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:55.557592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:55.557597Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:55.557657Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:59:55.557663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:55.557687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:59:55.557698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:59:55.558104Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:55.558110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:55.558143Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:55.558147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:59:55.558204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.558210Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:59:55.558219Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:55.558223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:55.558227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:55.558230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:55.558234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:59:55.558238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TO ... 
2057594046678944, stepId: 5000004 2025-07-08T12:00:45.182370Z node 105 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 128 -> 129 2025-07-08T12:00:45.182396Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-07-08T12:00:45.182417Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-07-08T12:00:45.182570Z node 105 :TX_COLUMNSHARD WARN: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=1003;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=4;result=not_found; 2025-07-08T12:00:45.183741Z node 105 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=1003;fline=tx_controller.cpp:215;event=finished_tx;tx_id=1003; FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2025-07-08T12:00:45.184272Z node 105 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:45.184281Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T12:00:45.184329Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-07-08T12:00:45.184356Z node 105 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:45.184362Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [105:207:2209], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2025-07-08T12:00:45.184368Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [105:207:2209], at schemeshard: 72057594046678944, txId: 1003, path id: 4 2025-07-08T12:00:45.184455Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-07-08T12:00:45.184462Z node 105 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TProposedWaitParts operationId# 1003:0 ProgressState at tablet: 72057594046678944 2025-07-08T12:00:45.184468Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: TCreateColumnTable TProposedWaitParts operationId# 1003:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-07-08T12:00:45.184604Z node 105 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2025-07-08T12:00:45.184617Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 1003 2025-07-08T12:00:45.184621Z node 105 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-07-08T12:00:45.184626Z node 105 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-07-08T12:00:45.184631Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-07-08T12:00:45.184783Z node 
105 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-07-08T12:00:45.184794Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-07-08T12:00:45.184798Z node 105 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-07-08T12:00:45.184802Z node 105 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2025-07-08T12:00:45.184809Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-07-08T12:00:45.184818Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2025-07-08T12:00:45.189527Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1003:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-07-08T12:00:45.189554Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1003:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:4 msg type: 268697639 2025-07-08T12:00:45.189577Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1003, partId: 0, tablet: 72057594037968897 2025-07-08T12:00:45.189728Z node 105 :HIVE INFO: [72057594037968897] TEvUpdateTabletsObject, msg: ObjectId: 7726343884038809171 TabletIds: 72075186233409546 TxId: 1003 TxPartId: 0 2025-07-08T12:00:45.189751Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: Update tablets object reply, message: Status: OK TxId: 1003 TxPartId: 0, at schemeshard: 72057594046678944 2025-07-08T12:00:45.189765Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Status: OK TxId: 1003 TxPartId: 0 2025-07-08T12:00:45.189879Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-07-08T12:00:45.190242Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-07-08T12:00:45.190523Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-07-08T12:00:45.207841Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1003 2025-07-08T12:00:45.207864Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-07-08T12:00:45.207885Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1003 FAKE_COORDINATOR: Erasing txId 1003 2025-07-08T12:00:45.217399Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-07-08T12:00:45.217449Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 
2025-07-08T12:00:45.217460Z node 105 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-07-08T12:00:45.217480Z node 105 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-07-08T12:00:45.217484Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-07-08T12:00:45.217489Z node 105 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-07-08T12:00:45.217492Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-07-08T12:00:45.217496Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-07-08T12:00:45.217513Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [105:367:2345] message: TxId: 1003 2025-07-08T12:00:45.217519Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-07-08T12:00:45.217528Z node 105 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2025-07-08T12:00:45.217532Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2025-07-08T12:00:45.217565Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-07-08T12:00:45.224284Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-07-08T12:00:45.224306Z node 105 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [105:428:2399] TestWaitNotification: OK eventTxId 1003 2025-07-08T12:00:45.224418Z node 105 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:45.224482Z node 105 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable" took 71us result status StatusSuccess 2025-07-08T12:00:45.224608Z node 105 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 
Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 3 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |68.0%| [LD] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |68.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TxUsage::The_Configuration_Is_Changing_As_We_Write_To_The_Topic [GOOD] >> Viewer::JsonStorageListingV1PDiskIdFilter [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts+UseSink >> DataShardSnapshots::LockedWriteCleanupOnSplit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit-UseSink >> DataShardSnapshots::VolatileSnapshotRefreshDiscard [GOOD] >> DataShardSnapshots::VolatileSnapshotTimeout |68.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |68.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TxUsage::The_Transaction_Starts_On_One_Version_And_Ends_On_The_Other |68.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead-UseSink >> TBlobStorageStoragePoolMonTest::ReducedSizeClassCalcTest [GOOD] |68.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |68.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::JsonStorageListingV1PDiskIdFilter [GOOD] Test command err: 2025-07-08T12:00:02.648072Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 7250, node 1 TClient is connected to server localhost:8014 json result: {"Success":true,"Result":{"Total":5,"Entities":[{"Name":"/Root/test","Type":"ext_sub_domain"},{"Name":"/Root/slice","Type":"ext_sub_domain"},{"Name":"/Root/qwerty","Type":"ext_sub_domain"},{"Name":"/Root/MyDatabase","Type":"ext_sub_domain"},{"Name":"/Root/TestDatabase","Type":"ext_sub_domain"}]},"Version":2} 2025-07-08T12:00:10.006558Z node 2 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:10.006923Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:10.007103Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:10.007134Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:10.007282Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:10.007493Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:10.008039Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:10.008060Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:10.008378Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-07-08T12:00:10.190817Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:10.354292Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-07-08T12:00:10.361291Z node 2 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-07-08T12:00:10.416108Z node 2 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 12429, node 2 TClient is connected to server localhost:63995 2025-07-08T12:00:10.456737Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:10.456760Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:10.456764Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:10.456897Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:00:20.178591Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:20.178843Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 
2025-07-08T12:00:20.179304Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:20.179333Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:20.180300Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:20.180387Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:20.180827Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:20.181170Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:20.181652Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-07-08T12:00:20.407545Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:20.571025Z node 11 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-07-08T12:00:20.576108Z node 11 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-07-08T12:00:20.633351Z node 11 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 19860, node 11 TClient is connected to server localhost:2732 2025-07-08T12:00:20.660016Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:20.660038Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:20.660041Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:20.660147Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:00:28.601121Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:28.601902Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:28.602099Z node 24 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:28.602241Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:28.602908Z node 27 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:28.603043Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:28.603185Z node 26 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:28.603409Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:28.603818Z node 21 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-07-08T12:00:28.743753Z node 20 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:28.871111Z node 20 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-07-08T12:00:28.876261Z node 20 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-07-08T12:00:28.920550Z node 20 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 61419, node 20 TClient is connected to server localhost:14487 2025-07-08T12:00:28.952342Z node 20 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:28.952358Z node 20 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:28.952361Z node 20 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:28.952414Z node 20 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:00:36.844637Z node 29 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:36.845355Z node 31 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:36.845737Z node 30 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:36.845965Z node 34 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:36.845982Z node 35 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:36.846270Z node 36 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:36.846327Z node 32 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:36.846551Z node 37 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:36.846860Z node 33 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-07-08T12:00:36.946328Z node 29 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:37.088643Z node 29 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-07-08T12:00:37.096784Z node 29 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-07-08T12:00:37.159250Z node 29 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 14812, node 29 TClient is connected to server localhost:13494 2025-07-08T12:00:37.189620Z node 29 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:37.189641Z node 29 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:37.189645Z node 29 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:37.189773Z node 29 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:00:44.197910Z node 38 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:44.198232Z node 40 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:44.198334Z node 39 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:44.198849Z node 44 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:44.199047Z node 42 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:44.199314Z node 41 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:44.199351Z node 46 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 
2025-07-08T12:00:44.199772Z node 43 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:44.200088Z node 45 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-07-08T12:00:44.300045Z node 38 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:44.426835Z node 38 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-07-08T12:00:44.442596Z node 38 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-07-08T12:00:44.486747Z node 38 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 63982, node 38 TClient is connected to server localhost:14309 2025-07-08T12:00:44.519900Z node 38 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:44.519921Z node 38 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:44.519926Z node 38 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:44.520082Z node 38 :NET_CLASSIFIER ERROR: got bad distributable configuration |68.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts-UseSink >> DataShardSnapshots::LockedWriteDistributedCommitFreeze-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict-UseSink |68.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |68.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |68.0%| [TA] $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TBlobStorageStoragePoolMonTest::SizeClassCalcTest [GOOD] |68.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |68.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |68.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |68.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::ReducedSizeClassCalcTest [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::DropTableThenStore [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:127:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:132:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:137:2058] recipient: [1:111:2143] 2025-07-08T11:59:55.165283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:59:55.165309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:55.165314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:59:55.165319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:59:55.165324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:59:55.165328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:59:55.165336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:55.165353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:59:55.165430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:55.177795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T11:59:55.177826Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] 
sender: [1:177:2058] recipient: [1:15:2062] 2025-07-08T11:59:55.185331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:55.185395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:59:55.185434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:59:55.196911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:59:55.197088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:59:55.197188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:55.197247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:59:55.197710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:55.197753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:59:55.197968Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:55.197978Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:55.197994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:59:55.198000Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:55.198006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:59:55.198039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-07-08T11:59:55.199333Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T11:59:55.216426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:55.216490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.216541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:59:55.216578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:59:55.216588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.217256Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:55.217280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:59:55.217322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.217331Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:59:55.217336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:59:55.217340Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:59:55.217695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.217706Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:55.217711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:59:55.218025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.218035Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.218041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:55.218047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:59:55.218565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:59:55.218915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:59:55.218952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:59:55.219125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:55.219146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:55.219152Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:55.219217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for 
txid 1:0 128 -> 240 2025-07-08T11:59:55.219223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:55.219247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:59:55.219259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:59:55.219625Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:55.219633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:55.219669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:55.219674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:59:55.219734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.219740Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:59:55.219750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:55.219754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:55.219758Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:55.219761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:55.219765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:59:55.219769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TO ... 
944, LocalPathId: 1] 2025-07-08T12:00:46.146163Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T12:00:46.146191Z node 88 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:46.146196Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [88:207:2209], at schemeshard: 72057594046678944, txId: 1005, path id: 1 2025-07-08T12:00:46.146200Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [88:207:2209], at schemeshard: 72057594046678944, txId: 1005, path id: 3 2025-07-08T12:00:46.146308Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2025-07-08T12:00:46.146315Z node 88 :FLAT_TX_SCHEMESHARD INFO: TDropOlapStore TProposedWaitParts operationId# 1005:0 ProgressState at schemeshard: 72057594046678944 2025-07-08T12:00:46.146324Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TDropOlapStore TProposedWaitParts operationId# 1005:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-07-08T12:00:46.146399Z node 88 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-07-08T12:00:46.146410Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-07-08T12:00:46.146415Z node 88 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2025-07-08T12:00:46.146423Z node 88 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-07-08T12:00:46.146428Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-07-08T12:00:46.146503Z node 88 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1005 2025-07-08T12:00:46.146512Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1005 2025-07-08T12:00:46.146516Z node 88 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2025-07-08T12:00:46.146519Z node 88 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-07-08T12:00:46.146523Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T12:00:46.146531Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2025-07-08T12:00:46.147224Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1005:0 from tablet: 72057594046678944 to tablet: 72075186233409546 
cookie: 72057594046678944:1 msg type: 275382275 2025-07-08T12:00:46.147249Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1005, partId: 0, tablet: 72075186233409546 2025-07-08T12:00:46.147287Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-07-08T12:00:46.147347Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1005 2025-07-08T12:00:46.147352Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1005, tablet: 72075186233409546, partId: 0 2025-07-08T12:00:46.147362Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1005:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1005 2025-07-08T12:00:46.147370Z node 88 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1005:0 129 -> 130 2025-07-08T12:00:46.147462Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-07-08T12:00:46.147735Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1005:0, at schemeshard: 72057594046678944 2025-07-08T12:00:46.147758Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2025-07-08T12:00:46.147763Z node 88 :FLAT_TX_SCHEMESHARD INFO: TDropOlapStore TProposedDeleteParts operationId# 1005:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:46.147777Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-07-08T12:00:46.147803Z node 88 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2025-07-08T12:00:46.147806Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-07-08T12:00:46.147811Z node 88 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2025-07-08T12:00:46.147814Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-07-08T12:00:46.147821Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: true 2025-07-08T12:00:46.147824Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-07-08T12:00:46.147828Z node 88 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2025-07-08T12:00:46.147832Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2025-07-08T12:00:46.147856Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T12:00:46.148261Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-07-08T12:00:46.148337Z node 88 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-07-08T12:00:46.148400Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:46.148521Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 
72057594046678944, LocalPathId: 3] was 1 2025-07-08T12:00:46.148639Z node 88 :TX_COLUMNSHARD WARN: tablet_id=72075186233409546;self_id=[88:329:2316];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:863;event=tablet_die; Forgetting tablet 72075186233409546 2025-07-08T12:00:46.149779Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T12:00:46.149788Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-07-08T12:00:46.149802Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:46.150575Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-07-08T12:00:46.150590Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-07-08T12:00:46.150674Z node 88 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 1005 2025-07-08T12:00:46.150724Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2025-07-08T12:00:46.150731Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2025-07-08T12:00:46.150789Z node 88 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2025-07-08T12:00:46.150806Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2025-07-08T12:00:46.150810Z node 88 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [88:547:2517] TestWaitNotification: OK eventTxId 1005 2025-07-08T12:00:46.150880Z node 88 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:46.150910Z node 88 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable" took 44us result status StatusPathDoesNotExist 2025-07-08T12:00:46.150950Z node 88 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/OlapStore/ColumnTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/OlapStore/ColumnTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-07-08T12:00:46.151019Z node 88 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore" 
Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:46.151035Z node 88 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore" took 17us result status StatusPathDoesNotExist 2025-07-08T12:00:46.151051Z node 88 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/OlapStore\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/OlapStore" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |68.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/topic/ut/unittest >> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit [GOOD] Test command err: 2025-07-08T11:58:17.606670Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679183158792696:2229];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:17.613102Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b30/r3tmp/tmpsV9siB/pdisk_1.dat 2025-07-08T11:58:17.649363Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T11:58:17.686443Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4318, node 1 2025-07-08T11:58:17.703734Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/43nv/001b30/r3tmp/yandexOXHWAF.tmp 2025-07-08T11:58:17.703744Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/43nv/001b30/r3tmp/yandexOXHWAF.tmp 2025-07-08T11:58:17.720438Z INFO: TTestServer started on Port 14732 GrpcPort 4318 TClient is connected to server localhost:14732 2025-07-08T11:58:17.745797Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:17.745842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:17.746305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T11:58:17.877050Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/43nv/001b30/r3tmp/yandexOXHWAF.tmp 2025-07-08T11:58:17.877428Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration PQClient connected to localhost:4318 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T11:58:17.893865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T11:58:17.896794Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:17.904581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-07-08T11:58:18.098013Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7524679187453760543:2291], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T11:58:18.098337Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTZiMjI4YWQtZDY0OGI0ZjMtYWJmZDdhODgtNTM5NzAyZTU=, ActorId: [1:7524679187453760541:2290], ActorState: ExecuteState, TraceId: 01jzmydayg7fg615w8z4a1bz2v, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T11:58:18.099637Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T11:58:18.604308Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:58:19.103808Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7524679191748727882:2296], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T11:58:19.104216Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDczNzAwYWMtNzA2ZmQyZGUtZTJmYzk2OS02ZjIwM2Zk, ActorId: [1:7524679191748727880:2295], ActorState: ExecuteState, TraceId: 01jzmydbywdfg8bc5h0md18t90, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T11:58:19.104345Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T11:58:20.112209Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7524679196043695190:2301], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T11:58:20.112601Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YThiOWE1NmUtMjRjOGNhNDktZGEzNDhiNWEtYTllMDNlNTY=, ActorId: [1:7524679196043695188:2300], ActorState: ExecuteState, TraceId: 01jzmydcy9d1nnnn9nwkcs41z4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T11:58:20.112723Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T11:58:21.116885Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7524679200338662498:2306], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T11:58:21.117398Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2NmMWY4ZDEtOGJkN2U0N2QtYjQ1Nzk2OGYtNzIxMWI2YWQ=, ActorId: [1:7524679200338662496:2305], ActorState: ExecuteState, TraceId: 01jzmyddxs6qvhf4kq64yw4yeq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T11:58:21.117536Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T11:58:22.121182Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7524679204633629806:2311], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T11:58:22.121483Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTViZjRkYjQtNmUyNTU1NzctZjc1MDQ1M2MtZWM5OTczMDc=, ActorId: [1:7524679204633629804:2310], ActorState: ExecuteState, TraceId: 01jzmydex53mwkwa6m9ajd02de, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T11:58:22.121592Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T11:58:22.605994Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7524679183158792696:2229];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:22.606506Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-07-08T11:58:23.080567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T11:58:23.098769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T11:58:23.129945Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7524679208928597268:2336], status: SCHEME_ERROR, issues:
: Error: Type annotation, code ... :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 1 (6-11) 2025-07-08T12:00:44.181202Z :DEBUG: [/Root] [/Root] [8c47619c-9a97cd16-d455287b-d3b16961] [] Returning serverBytesSize = 0 to budget 2025-07-08T12:00:44.181212Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 1 (12-17) 2025-07-08T12:00:44.181215Z :DEBUG: [/Root] [/Root] [8c47619c-9a97cd16-d455287b-d3b16961] [] Returning serverBytesSize = 0 to budget 2025-07-08T12:00:44.181222Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 1 (18-19) 2025-07-08T12:00:44.181224Z :DEBUG: [/Root] [/Root] [8c47619c-9a97cd16-d455287b-d3b16961] [] Returning serverBytesSize = 0 to budget 2025-07-08T12:00:44.181338Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-07-08T12:00:44.181377Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-07-08T12:00:44.181415Z :DEBUG: [/Root] Take Data. Partition 0. Read: {2, 0} (2-2) 2025-07-08T12:00:44.181428Z :DEBUG: [/Root] Take Data. Partition 0. Read: {2, 1} (3-3) 2025-07-08T12:00:44.181564Z :DEBUG: [/Root] Take Data. Partition 0. Read: {3, 0} (4-4) 2025-07-08T12:00:44.181581Z :DEBUG: [/Root] Take Data. Partition 0. Read: {4, 0} (5-5) 2025-07-08T12:00:44.181599Z :DEBUG: [/Root] Take Data. Partition 0. Read: {5, 0} (6-6) 2025-07-08T12:00:44.181615Z :DEBUG: [/Root] Take Data. Partition 0. Read: {6, 0} (7-7) 2025-07-08T12:00:44.181767Z :DEBUG: [/Root] Take Data. Partition 0. Read: {7, 0} (8-8) 2025-07-08T12:00:44.181791Z :DEBUG: [/Root] Take Data. Partition 0. Read: {7, 1} (9-9) 2025-07-08T12:00:44.181810Z :DEBUG: [/Root] Take Data. Partition 0. Read: {8, 0} (10-10) 2025-07-08T12:00:44.181827Z :DEBUG: [/Root] Take Data. Partition 0. Read: {9, 0} (11-11) 2025-07-08T12:00:44.181845Z :DEBUG: [/Root] Take Data. Partition 0. Read: {10, 0} (12-12) 2025-07-08T12:00:44.181867Z :DEBUG: [/Root] Take Data. Partition 0. Read: {10, 1} (13-13) 2025-07-08T12:00:44.181888Z :DEBUG: [/Root] Take Data. Partition 0. Read: {10, 2} (14-14) 2025-07-08T12:00:44.181904Z :DEBUG: [/Root] Take Data. Partition 0. Read: {10, 3} (15-15) 2025-07-08T12:00:44.182169Z :DEBUG: [/Root] Take Data. Partition 0. Read: {10, 4} (16-16) 2025-07-08T12:00:44.182190Z :DEBUG: [/Root] Take Data. Partition 0. Read: {10, 5} (17-17) 2025-07-08T12:00:44.182205Z :DEBUG: [/Root] Take Data. Partition 0. Read: {11, 0} (18-18) 2025-07-08T12:00:44.182221Z :DEBUG: [/Root] Take Data. Partition 0. Read: {11, 1} (19-19) 2025-07-08T12:00:44.182235Z :DEBUG: [/Root] [/Root] [8c47619c-9a97cd16-d455287b-d3b16961] [] The application data is transferred to the client. Number of messages 20, size 2000000 bytes 2025-07-08T12:00:44.182243Z :DEBUG: [/Root] [/Root] [8c47619c-9a97cd16-d455287b-d3b16961] [] Returning serverBytesSize = 0 to budget 0 20 2025-07-08T12:00:44.182550Z :DEBUG: [/Root] [/Root] [8c47619c-9a97cd16-d455287b-d3b16961] [] Commit offsets [0, 20). 
Partition stream id: 1 2025-07-08T12:00:44.183270Z node 13 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_13_1_33754215606715482_v1 grpc read done: success# 1, data# { read_request { bytes_size: 2002648 } } 2025-07-08T12:00:44.183356Z node 13 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_13_1_33754215606715482_v1 got read request: guid# 51fdf91d-23874139-24953bf0-cf6bc2fb 2025-07-08T12:00:44.184511Z node 13 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_13_1_33754215606715482_v1 grpc read done: success# 1, data# { commit_offset_request { commit_offsets { partition_session_id: 1 offsets { end: 20 } } } } 2025-07-08T12:00:44.184599Z node 13 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_13_1_33754215606715482_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) committing to position 20 prev 0 end 20 by cookie 2 2025-07-08T12:00:44.188714Z node 13 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic_A' requestId: 2025-07-08T12:00:44.188732Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client message batch for topic 'topic_A' partition 0 2025-07-08T12:00:44.188782Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer offset is set to 20 (startOffset 0) session test-consumer_13_1_33754215606715482_v1 2025-07-08T12:00:44.188817Z node 13 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-07-08T12:00:44.189296Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] Topic 'topic_A' partition 0 user test-consumer readTimeStamp for offset 20 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-07-08T12:00:44.189312Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-07-08T12:00:44.189324Z node 13 :PERSQUEUE DEBUG: Answer ok topic: 'topic_A' partition: 0 messageNo: 0 requestId: cookie: 2 2025-07-08T12:00:44.189354Z node 13 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_13_1_33754215606715482_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 2 } 2025-07-08T12:00:44.189361Z node 13 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_13_1_33754215606715482_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) commit done to position 20 endOffset 20 with cookie 2 2025-07-08T12:00:44.189373Z node 13 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_13_1_33754215606715482_v1 replying for commits: assignId# 1, from# 2, to# 2, offset# 20 2025-07-08T12:00:44.194054Z :DEBUG: [/Root] [/Root] [8c47619c-9a97cd16-d455287b-d3b16961] [] Committed response: { partitions_committed_offsets { partition_session_id: 1 committed_offset: 20 } } 2025-07-08T12:00:45.160157Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:19:20 2025-07-08T12:00:45.160188Z :INFO: [/Root] [/Root] [8c47619c-9a97cd16-d455287b-d3b16961] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1000 BytesRead: 2000000 MessagesRead: 20 BytesReadCompressed: 2000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 
2025-07-08T12:00:45.166718Z node 13 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_13_1_33754215606715482_v1 checking auth because of timeout 2025-07-08T12:00:45.166751Z node 13 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_13_1_33754215606715482_v1 auth for : test-consumer 2025-07-08T12:00:45.168908Z node 13 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_13_1_33754215606715482_v1 Handle describe topics response 2025-07-08T12:00:45.168942Z node 13 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_13_1_33754215606715482_v1 auth is DEAD 2025-07-08T12:00:45.168998Z node 13 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_13_1_33754215606715482_v1 auth ok: topics# 1, initDone# 1 2025-07-08T12:00:46.168177Z :INFO: [/Root] [/Root] [8c47619c-9a97cd16-d455287b-d3b16961] Closing read session. Close timeout: 0.000000s 2025-07-08T12:00:46.168197Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:19:20 2025-07-08T12:00:46.168210Z :INFO: [/Root] [/Root] [8c47619c-9a97cd16-d455287b-d3b16961] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2008 BytesRead: 2000000 MessagesRead: 20 BytesReadCompressed: 2000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-07-08T12:00:46.168230Z :NOTICE: [/Root] [/Root] [8c47619c-9a97cd16-d455287b-d3b16961] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-07-08T12:00:46.168239Z :DEBUG: [/Root] [/Root] [8c47619c-9a97cd16-d455287b-d3b16961] [] Abort session to cluster 2025-07-08T12:00:46.168389Z :NOTICE: [/Root] [/Root] [8c47619c-9a97cd16-d455287b-d3b16961] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-07-08T12:00:46.168598Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|7961c469-18582b6a-f479d0cd-1d0fe7af_0] PartitionId [0] Generation [2] Write session: close. Timeout 0.000000s 2025-07-08T12:00:46.168608Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|7961c469-18582b6a-f479d0cd-1d0fe7af_0] PartitionId [0] Generation [2] Write session will now close 2025-07-08T12:00:46.168615Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|7961c469-18582b6a-f479d0cd-1d0fe7af_0] PartitionId [0] Generation [2] Write session: aborting 2025-07-08T12:00:46.168708Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|7961c469-18582b6a-f479d0cd-1d0fe7af_0] PartitionId [0] Generation [2] Write session: gracefully shut down, all writes complete 2025-07-08T12:00:46.168713Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|7961c469-18582b6a-f479d0cd-1d0fe7af_0] PartitionId [0] Generation [2] Write session: destroy 2025-07-08T12:00:46.168596Z node 13 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_13_1_33754215606715482_v1 grpc read done: success# 0, data# { } 2025-07-08T12:00:46.168611Z node 13 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_13_1_33754215606715482_v1 grpc read failed 2025-07-08T12:00:46.168615Z node 13 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_13_1_33754215606715482_v1 grpc closed 2025-07-08T12:00:46.168628Z node 13 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_13_1_33754215606715482_v1 is DEAD 2025-07-08T12:00:46.168820Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session test-consumer_13_1_33754215606715482_v1 2025-07-08T12:00:46.168826Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [13:7524679813434860463:2499] destroyed 2025-07-08T12:00:46.168856Z node 13 :PERSQUEUE_READ_BALANCER INFO: [72075186224037895][topic_A] pipe [13:7524679813434860460:2496] disconnected; active server actors: 1 2025-07-08T12:00:46.168860Z node 13 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037895][topic_A] pipe [13:7524679813434860460:2496] client test-consumer disconnected session test-consumer_13_1_33754215606715482_v1 2025-07-08T12:00:46.168887Z node 13 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_13_1_33754215606715482_v1 2025-07-08T12:00:46.169112Z node 13 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: test-message_group_id|7961c469-18582b6a-f479d0cd-1d0fe7af_0 grpc read done: success: 0 data: 2025-07-08T12:00:46.169123Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|7961c469-18582b6a-f479d0cd-1d0fe7af_0 grpc read failed 2025-07-08T12:00:46.169436Z node 13 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 5 sessionId: test-message_group_id|7961c469-18582b6a-f479d0cd-1d0fe7af_0 2025-07-08T12:00:46.169440Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-message_group_id|7961c469-18582b6a-f479d0cd-1d0fe7af_0 is DEAD 2025-07-08T12:00:46.169560Z node 13 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-07-08T12:00:46.169606Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [13:7524679804844925769:2468] destroyed 2025-07-08T12:00:46.169727Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::DropOwner. 
|68.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |68.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::SizeClassCalcTest [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts+UseSink [GOOD] >> TUserAttrsTestWithReboots::Reboots [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit-UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead-UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnCopyTable+UseSink >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts-UseSink >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead+UseSink |68.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |68.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> GracefulShutdown::TTxGracefulShutdown >> TNodeBrokerTest::TestListNodesEpochDeltas >> TSlotIndexesPoolTest::Expansion [GOOD] >> TDynamicNameserverTest::CacheMissNoDeadline >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotReadLockedWrites+UseSink |68.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |68.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |68.1%| [TA] $(B)/ydb/core/viewer/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TDynamicNameserverTest::CacheMissNoDeadline [GOOD] >> TDatabaseResolverTests::Ydb_Serverless_Timeout [GOOD] |68.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Expansion [GOOD] |68.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TUserAttrsTestWithReboots::Reboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:127:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:132:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:137:2058] recipient: [1:111:2143] 2025-07-08T12:00:29.361150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:00:29.361172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:29.361177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 
2025-07-08T12:00:29.361182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:00:29.361193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:00:29.361197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:29.361204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:29.361223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:00:29.361309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:29.374601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T12:00:29.374623Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:177:2058] recipient: [1:15:2062] 2025-07-08T12:00:29.378024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:29.378071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:29.378100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:00:29.379549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:29.379655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:29.379745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:29.379785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:00:29.380166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:29.380200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:00:29.380422Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:29.380432Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:29.380454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:29.380465Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:29.380471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:00:29.380506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-07-08T12:00:29.381477Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 
is [1:124:2150] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T12:00:29.396540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:29.396613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:29.396682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:00:29.396723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:29.396733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:29.401353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:29.401391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:00:29.401458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:29.401484Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:00:29.401489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:00:29.401495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:00:29.406566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:29.406588Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:29.406595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:00:29.407143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:29.407159Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:29.407165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:29.407171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:00:29.407900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:29.409135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 
msg type: 269090816 2025-07-08T12:00:29.409172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:00:29.409380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:29.409409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:29.409417Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:29.409496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:00:29.409506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:29.409534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:29.409547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:00:29.410029Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:29.410038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:29.410081Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:29.410087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:00:29.410153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:29.410161Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:00:29.410172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:29.410176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:29.410181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:29.410184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:29.410188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:00:29.410193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TO ... 
ompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2025-07-08T12:00:47.721402Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2025-07-08T12:00:47.721406Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [60:373:2364] TestWaitNotification: OK eventTxId 1005 2025-07-08T12:00:47.721467Z node 60 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:47.721490Z node 60 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 32us result status StatusSuccess 2025-07-08T12:00:47.721550Z node 60 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA1" Value: "ValA1" } UserAttributes { Key: "AttrA3" Value: "ValA3" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 1006 2025-07-08T12:00:47.722056Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterUserAttributes AlterUserAttributes { PathName: "DirB" UserAttributes { Key: "AttrA3" } UserAttributes { Key: "AttrA1" } } } TxId: 1006 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:47.722081Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: /MyRoot/DirB, operationId: 1006:0, at schemeshard: 72057594046678944 2025-07-08T12:00:47.722096Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-07-08T12:00:47.722113Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1006:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:47.722117Z node 60 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 1006:0, at schemeshard: 72057594046678944 2025-07-08T12:00:47.722514Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1006, response: Status: StatusAccepted TxId: 1006 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 
2025-07-08T12:00:47.722537Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1006, database: /MyRoot, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: /MyRoot/DirB 2025-07-08T12:00:47.722562Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1006:0, at schemeshard: 72057594046678944 2025-07-08T12:00:47.722566Z node 60 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 1006:0, at schemeshard: 72057594046678944 2025-07-08T12:00:47.722572Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1006 ready parts: 1/1 2025-07-08T12:00:47.722589Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1006 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:47.722944Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1006:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1006 msg type: 269090816 2025-07-08T12:00:47.722981Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1006, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1006 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1006 at step: 5000006 2025-07-08T12:00:47.723037Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:47.723053Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1006 Coordinator: 72057594046316545 AckTo { RawX1: 123 RawX2: 257698039909 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:47.723059Z node 60 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 1006:0, stepId:5000006, at schemeshard: 72057594046678944 2025-07-08T12:00:47.723085Z node 60 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1006:0 progress is 1/1 2025-07-08T12:00:47.723089Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2025-07-08T12:00:47.723093Z node 60 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1006:0 progress is 1/1 2025-07-08T12:00:47.723096Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2025-07-08T12:00:47.723103Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-07-08T12:00:47.723112Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 1/1, is published: false 2025-07-08T12:00:47.723119Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-07-08T12:00:47.723123Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2025-07-08T12:00:47.723126Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1006:0 2025-07-08T12:00:47.723130Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1006:0 2025-07-08T12:00:47.723135Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount 
reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T12:00:47.723139Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1006, publications: 1, subscribers: 0 2025-07-08T12:00:47.723143Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 3], 6 2025-07-08T12:00:47.723520Z node 60 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:47.723530Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1006, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T12:00:47.723551Z node 60 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:47.723555Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [60:209:2211], at schemeshard: 72057594046678944, txId: 1006, path id: 3 FAKE_COORDINATOR: Erasing txId 1006 2025-07-08T12:00:47.723625Z node 60 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 6 PathOwnerId: 72057594046678944, cookie: 1006 2025-07-08T12:00:47.723633Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 6 PathOwnerId: 72057594046678944, cookie: 1006 2025-07-08T12:00:47.723637Z node 60 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1006 2025-07-08T12:00:47.723641Z node 60 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 6 2025-07-08T12:00:47.723646Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-07-08T12:00:47.723658Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1006, subscribers: 0 2025-07-08T12:00:47.723956Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 TestModificationResult got TxId: 1006, wait until txId: 1006 TestWaitNotification wait txId: 1006 2025-07-08T12:00:47.724056Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: send EvNotifyTxCompletion 2025-07-08T12:00:47.724062Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1006 2025-07-08T12:00:47.724115Z node 60 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1006, at schemeshard: 72057594046678944 2025-07-08T12:00:47.724128Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: got EvNotifyTxCompletionResult 2025-07-08T12:00:47.724132Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: satisfy waiter [60:390:2381] TestWaitNotification: OK eventTxId 1006 2025-07-08T12:00:47.724195Z node 60 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:47.724213Z node 60 
:SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 25us result status StatusSuccess 2025-07-08T12:00:47.724262Z node 60 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 4 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TDatabaseResolverTests::MySQL [GOOD] >> TDatabaseResolverTests::MySQL_PermissionDenied >> TDatabaseResolverTests::ClickHouseNative >> TDatabaseResolverTests::Ydb_Serverless [GOOD] >> TDatabaseResolverTests::MySQL_PermissionDenied [GOOD] >> TDatabaseResolverTests::ClickHouseNative [GOOD] >> TDatabaseResolverTests::ClickHouseHttp [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead-UseSink >> TDatabaseResolverTests::Greenplum_MasterNode >> TDatabaseResolverTests::PostgreSQL [GOOD] >> TDatabaseResolverTests::PostgreSQL_PermissionDenied [GOOD] |68.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissNoDeadline [GOOD] Test command err: 2025-07-08T12:00:48.413694Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.416080Z node 1 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:48.443664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:00:48.443683Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:48.447752Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T12:00:48.448156Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T12:00:48.448220Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2025-07-08T12:00:48.448351Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T12:00:48.448900Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2025-07-08T12:00:48.448932Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2025-07-08T12:00:48.448988Z node 1 :NODE_BROKER DEBUG: [DB] Using default config. 
2025-07-08T12:00:48.449004Z node 1 :NODE_BROKER DEBUG: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-07-08T12:00:48.449011Z node 1 :NODE_BROKER DEBUG: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-07-08T12:00:48.470542Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2025-07-08T12:00:48.470581Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.024000Z 2025-07-08T12:00:48.470587Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2025-07-08T12:00:48.480886Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:203:2200], Recipient [1:176:2181]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:48.481340Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:165:2175], Recipient [1:176:2181]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2025-07-08T12:00:48.481353Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-07-08T12:00:48.481364Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2025-07-08T12:00:48.481428Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:16:2063], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-07-08T12:00:48.481443Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:16:2063], path# /dc-1, domainOwnerId# 72057594046678944 2025-07-08T12:00:48.487745Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:16:2063], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } 
PathId: 1 PathOwnerId: 72057594046678944 } 2025-07-08T12:00:48.487819Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:16:2063], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046678944 }, by path# { Subscriber: { Subscriber: [1:205:2201] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-07-08T12:00:48.487871Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:16:2063], cacheItem# { Subscriber: { Subscriber: [1:205:2201] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-07-08T12:00:48.487924Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:212:2202], recipient# [1:204:2181], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-07-08T12:00:48.487938Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, 
LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-07-08T12:00:48.487954Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-07-08T12:00:48.487968Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:204:2181], Recipient [1:176:2181]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-07-08T12:00:48.487971Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-07-08T12:00:48.487983Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2025-07-08T12:00:48.487986Z node 1 :NODE_BROKER DEBUG: Registration request from host1:1001 (not fixed) tenant: dc-1 2025-07-08T12:00:48.488056Z node 1 :NODE_BROKER DEBUG: [DB] Adding node #1024 host1:1001 to database resolvehost=host1.host1.host1 address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-07-08T12:00:48.488081Z node 1 :NODE_BROKER DEBUG: [Dirty] Added node #1024 host1:1001 2025-07-08T12:00:48.488085Z node 1 :NODE_BROKER DEBUG: [DB] Update epoch version in database version=2 2025-07-08T12:00:48.488092Z node 1 :NODE_BROKER DEBUG: [Dirty] Update current epoch version from 1 to 2 2025-07-08T12:00:48.501275Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2025-07-08T12:00:48.501298Z node 1 :NODE_BROKER DEBUG: [Committed] Added node #1024 host1:1001 2025-07-08T12:00:48.501306Z node 1 :NODE_BROKER DEBUG: [Committed] Update current epoch version from 1 to 2 2025-07-08T12:00:48.501310Z node 1 :NODE_BROKER DEBUG: Add node #1024 host1:1001 to epoch cache 2025-07-08T12:00:48.501366Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200024000 Name: "slot-0" } 2025-07-08T12:00:48.501479Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:216:2206], Recipient [1:176:2181]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:48.501509Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:165:2175], Recipient [1:176:2181]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" } 2025-07-08T12:00:48.501515Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-07-08T12:00:48.501526Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" 2025-07-08T12:00:48.501567Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:16:2063], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-07-08T12:00:48.501591Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:16:2063], cacheItem# { Subscriber: { Subscriber: [1:205:2201] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-07-08T12:00:48.501632Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:218:2207], recipient# [1:217:2181], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-07-08T12:00:48.501645Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-07-08T12:00:48.501655Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-07-08T12:00:48.501667Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:217:2181], Recipient [1:176:2181]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-07-08T12:00:48.501670Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-07-08T12:00:48.501681Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2025-07-08T12:00:48.501685Z node 1 :NODE_BROKER DEBUG: Registration request from host2:1001 (not fixed) tenant: dc-1 2025-07-08T12:00:48.501709Z node 1 :NODE_BROKER DEBUG: [DB] Adding node #1025 host2:1001 to database resolvehost=host2.host2.host2 address=1.2.3.5 dc=1 location=DC=1/M=2/R=3/U=5/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=1 authorizedbycertificate=false 2025-07-08T12:00:48.501740Z node 1 :NODE_BROKER DEBUG: [Dirty] Added node #1025 host2:1001 2025-07-08T12:00:48.501747Z node 1 :NODE_BROKER DEBUG: [DB] Update 
epoch version in database version=3 2025-07-08T12:00:48.501755Z node 1 :NODE_BROKER DEBUG: [Dirty] Update current epoch version from 2 to 3 2025-07-08T12:00:48.517335Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2025-07-08T12:00:48.517365Z node 1 :NODE_BROKER DEBUG: [Committed] Added node #1025 host2:1001 2025-07-08T12:00:48.517374Z node 1 :NODE_BROKER DEBUG: [Committed] Update current epoch version from 2 to 3 2025-07-08T12:00:48.517378Z node 1 :NODE_BROKER DEBUG: Add node #1025 host2:1001 to epoch cache 2025-07-08T12:00:48.517443Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 7200024000 Name: "slot-1" } ... waiting for cache miss 2025-07-08T12:00:48.517522Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvResolveNode { NodeId: 1024 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.517548Z node 1 :NAMESERVICE DEBUG: New cache miss: nodeId# 1024, deadline# 18446744073709.551615s 2025-07-08T12:00:48.517556Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvResolveNode { NodeId: 1025 Deadline: 1.034024s } 2025-07-08T12:00:48.517562Z node 1 :NAMESERVICE DEBUG: New cache miss: nodeId# 1025, deadline# 1.034024s 2025-07-08T12:00:48.517566Z node 1 :NAMESERVICE DEBUG: Schedule wakeup for new earliest deadline 1.034024s 2025-07-08T12:00:48.517627Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:224:2210], Recipient [1:176:2181]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:48.517645Z node 1 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:224:2210] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... waiting for cache miss (done) 2025-07-08T12:00:48.569203Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:18:2065], Recipient [1:176:2181]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 1 } 2025-07-08T12:00:48.569225Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:48.569241Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.024000Z - 1970-01-01T01:00:00.024000Z - 1970-01-01T02:00:00.024000Z 2025-07-08T12:00:48.593450Z node 1 :NAMESERVICE DEBUG: HandleWakeup at 1.035024s 2025-07-08T12:00:48.593481Z node 1 :NAMESERVICE DEBUG: Cache miss failed: nodeId=1025, error=Deadline exceeded ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... 
unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from to NODE_BROKER_ACTOR 2025-07-08T12:00:48.593570Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:221:2065], Recipient [1:176:2181]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-07-08T12:00:48.593581Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-07-08T12:00:48.593620Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200024000 Name: "slot-0" } } 2025-07-08T12:00:48.593640Z node 1 :NAMESERVICE DEBUG: Handle NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.host1.host1" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200024000 Name: "slot-0" } } 2025-07-08T12:00:48.593654Z node 1 :NAMESERVICE DEBUG: Cache miss succeed: nodeId=1024 2025-07-08T12:00:48.593667Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvResolveNode { NodeId: 1024 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.593684Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:223:2065], Recipient [1:176:2181]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1025 } 2025-07-08T12:00:48.593687Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-07-08T12:00:48.593701Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.host2.host2" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 7200024000 Name: "slot-1" } } >> TDatabaseResolverTests::Ydb_Dedicated [GOOD] >> TDatabaseResolverTests::Greenplum_MasterNode [GOOD] >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] |68.1%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... 
results_accumulator.log} |68.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Serverless_Timeout [GOOD] Test command err: 2025-07-08T12:00:48.687103Z node 1 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed Ydb database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh': Connection timeout |68.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Serverless [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::MySQL_PermissionDenied [GOOD] Test command err: 2025-07-08T12:00:48.758648Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed MySQL database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-mysql/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. |68.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ClickHouseHttp [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart-UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::PostgreSQL_PermissionDenied [GOOD] Test command err: 2025-07-08T12:00:48.962534Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed PostgreSQL database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-postgresql/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. Please check that your service account has role `managed-postgresql.viewer`. |68.1%| [TA] {RESULT} $(B)/ydb/core/viewer/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TDatabaseResolverTests::DataStreams_Serverless [GOOD] >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnCopyTable+UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnCopyTable-UseSink >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError [GOOD] |68.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Dedicated [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] Test command err: 2025-07-08T12:00:48.990439Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed Greenplum database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-greenplum/v1/clusters/etn021us5r9rhld1vgbh/master-hosts': you have no permission to resolve database id into database endpoint. >> TDatabaseResolverTests::DataStreams_Dedicated [GOOD] >> TDatabaseResolverTests::ClickHouse_PermissionDenied >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] >> DataShardSnapshots::MvccSnapshotReadLockedWrites+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotReadLockedWrites-UseSink >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError [GOOD] Test command err: 2025-07-08T12:00:49.433668Z node 1 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed DataStreams database with id etn021us5r9rhld1vgb1 via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgb1': Status: 404 Response body: {"message":"Database not found"} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] Test command err: 2025-07-08T12:00:49.370522Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed DataStreams database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh': you have no permission to resolve database id into database endpoint. |68.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> GracefulShutdown::TTxGracefulShutdown [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] Test command err: 2025-07-08T12:00:49.667607Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed ClickHouse database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-clickhouse/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. Please check that your service account has role `managed-clickhouse.viewer`. 
>> ExternalBlobsMultipleChannels::Simple >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead-UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead+UseSink >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction |68.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |68.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |68.2%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> GracefulShutdown::TTxGracefulShutdown [GOOD] Test command err: 2025-07-08T12:00:48.391654Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.404075Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.404141Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.404172Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.404195Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.404236Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.404273Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.404311Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.408130Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.408150Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.408176Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.408196Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.408206Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.408216Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.408225Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.412130Z node 1 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:48.412639Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.412921Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 
2025-07-08T12:00:48.413325Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.413388Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.413413Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.413468Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.413517Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.413623Z node 8 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:48.413655Z node 2 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:48.413737Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.413758Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.413771Z node 3 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:48.413789Z node 6 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:48.413903Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.413941Z node 4 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:48.414057Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.414069Z node 5 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:48.414090Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.414181Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.414196Z node 7 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:48.414216Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.414278Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.414359Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 
2025-07-08T12:00:48.414381Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.414516Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.414559Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.416573Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.416606Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.416676Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.416742Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.417112Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.417146Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.418205Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.418404Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.418739Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.418770Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.418853Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.418945Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.419000Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.419028Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.419793Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.419832Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.419925Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.419965Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.420391Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.420502Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.421903Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.422035Z node 6 :NAMESERVICE 
DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.447001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:00:48.447020Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:48.455084Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T12:00:48.455542Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T12:00:48.455629Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2025-07-08T12:00:48.455780Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T12:00:48.456627Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2025-07-08T12:00:48.456665Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2025-07-08T12:00:48.456698Z node 1 :NODE_BROKER DEBUG: [DB] Using default config. 2025-07-08T12:00:48.456711Z node 1 :NODE_BROKER DEBUG: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-07-08T12:00:48.456717Z node 1 :NODE_BROKER DEBUG: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-07-08T12:00:48.488718Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2025-07-08T12:00:48.488758Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2025-07-08T12:00:48.488765Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2025-07-08T12:00:48.499155Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:608:2207], Recipient [1:572:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:48.499571Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:561:2180], Recipient [1:572:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-07-08T12:00:48.499584Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:48.499600Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 ... 
eTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046678944 }, by path# { Subscriber: { Subscriber: [1:612:2210] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-07-08T12:00:48.521640Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:23:2070], cacheItem# { Subscriber: { Subscriber: [1:612:2210] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-07-08T12:00:48.521769Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:619:2211], recipient# [1:611:2186], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-07-08T12:00:48.521802Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-07-08T12:00:48.521830Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# 
<72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-07-08T12:00:48.521856Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:611:2186], Recipient [1:572:2186]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-07-08T12:00:48.521864Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-07-08T12:00:48.521888Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2025-07-08T12:00:48.521893Z node 1 :NODE_BROKER DEBUG: Registration request from host1:1001 (not fixed) tenant: dc-1 2025-07-08T12:00:48.522063Z node 1 :NODE_BROKER DEBUG: [DB] Adding node #1024 host1:1001 to database resolvehost=host1.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-07-08T12:00:48.522116Z node 1 :NODE_BROKER DEBUG: [Dirty] Added node #1024 host1:1001 2025-07-08T12:00:48.522122Z node 1 :NODE_BROKER DEBUG: [DB] Update epoch version in database version=2 2025-07-08T12:00:48.522135Z node 1 :NODE_BROKER DEBUG: [Dirty] Update current epoch version from 1 to 2 2025-07-08T12:00:48.536191Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2025-07-08T12:00:48.536220Z node 1 :NODE_BROKER DEBUG: [Committed] Added node #1024 host1:1001 2025-07-08T12:00:48.536228Z node 1 :NODE_BROKER DEBUG: [Committed] Update current epoch version from 1 to 2 2025-07-08T12:00:48.536234Z node 1 :NODE_BROKER DEBUG: Add node #1024 host1:1001 to epoch cache 2025-07-08T12:00:48.536304Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-0" } 2025-07-08T12:00:48.536511Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:623:2215], Recipient [1:572:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:48.536543Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039948, Sender [1:561:2180], Recipient [1:572:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvGracefulShutdownRequest { NodeId: 1024 } 2025-07-08T12:00:48.536550Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvGracefulShutdownRequest 2025-07-08T12:00:48.536556Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvGracefulShutdownRequest: request# NodeId: 1024 2025-07-08T12:00:48.536571Z node 1 :NODE_BROKER DEBUG: TTxGracefulShutdown Execute. 
Graceful Shutdown request from 1024 2025-07-08T12:00:48.536577Z node 1 :NODE_BROKER DEBUG: [DB] Release slot index (0) node #1024 host1:1001 in database 2025-07-08T12:00:48.547875Z node 1 :NODE_BROKER DEBUG: TTxGracefulShutdown Complete 2025-07-08T12:00:48.548027Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:627:2219], Recipient [1:572:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:48.548112Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:561:2180], Recipient [1:572:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" } 2025-07-08T12:00:48.548120Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-07-08T12:00:48.548132Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1" 2025-07-08T12:00:48.548185Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:23:2070], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-07-08T12:00:48.548215Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:23:2070], cacheItem# { Subscriber: { Subscriber: [1:612:2210] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-07-08T12:00:48.548273Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:629:2220], recipient# [1:628:2186], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-07-08T12:00:48.548292Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 
TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-07-08T12:00:48.548305Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-07-08T12:00:48.548320Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:628:2186], Recipient [1:572:2186]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-07-08T12:00:48.548325Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-07-08T12:00:48.548343Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2025-07-08T12:00:48.548348Z node 1 :NODE_BROKER DEBUG: Registration request from host2:1001 (not fixed) tenant: dc-1 2025-07-08T12:00:48.548379Z node 1 :NODE_BROKER DEBUG: [DB] Adding node #1025 host2:1001 to database resolvehost=host2.yandex.net address=1.2.3.5 dc=1 location=DC=1/M=2/R=3/U=5/ lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-07-08T12:00:48.548426Z node 1 :NODE_BROKER DEBUG: [Dirty] Added node #1025 host2:1001 2025-07-08T12:00:48.548432Z node 1 :NODE_BROKER DEBUG: [DB] Update epoch version in database version=3 2025-07-08T12:00:48.548442Z node 1 :NODE_BROKER DEBUG: [Dirty] Update current epoch version from 2 to 3 2025-07-08T12:00:48.561925Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2025-07-08T12:00:48.561958Z node 1 :NODE_BROKER DEBUG: [Committed] Added node #1025 host2:1001 2025-07-08T12:00:48.561969Z node 1 :NODE_BROKER DEBUG: [Committed] Update current epoch version from 2 to 3 2025-07-08T12:00:48.561975Z node 1 :NODE_BROKER DEBUG: Add node #1025 host2:1001 to epoch cache 2025-07-08T12:00:48.562046Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.5" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "5" } Expire: 7200025000 Name: "slot-0" } |68.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest |68.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts [GOOD] >> DataShardSnapshots::LockedWritesLimitedPerKey+UseSink |68.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/storagepoolmon/ut/unittest |68.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> ExternalBlobsMultipleChannels::WithCompaction >> ExternalBlobsMultipleChannels::SingleChannel |68.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest |68.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest |68.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest |68.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> DataShardSnapshots::LockedWriteCleanupOnCopyTable-UseSink [GOOD] >> DataShardSnapshots::DelayedWriteReadableAfterSplit |68.2%| [TA] $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |68.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots >> DataShardSnapshots::MvccSnapshotReadLockedWrites-UseSink [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink |68.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots >> TNodeBrokerTest::NodeNameWithDifferentTenants >> TNodeBrokerTest::MinDynamicNodeIdShifted >> TNodeBrokerTest::LoadStateMoveEpoch |68.2%| [TA] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} |68.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |68.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |68.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |68.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |68.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |68.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |68.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |68.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |68.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart-UseSink >> TNodeBrokerTest::TestListNodesEpochDeltas [GOOD] |68.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |68.3%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |68.3%| [TA] $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSlotIndexesPoolTest::Init [GOOD] >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns [GOOD] >> TNodeBrokerTest::BasicFunctionality |68.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |68.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::RegistrationPipelining >> TNodeBrokerTest::UpdateEpochPipelining >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink [GOOD] >> TSlotIndexesPoolTest::Basic [GOOD] >> DataShardSnapshots::LockedWritesLimitedPerKey+UseSink [GOOD] >> DataShardSnapshots::LockedWritesLimitedPerKey-UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodesEpochDeltas [GOOD] Test command err: 2025-07-08T12:00:48.424251Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.434697Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.434742Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.434762Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.434780Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.434807Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.434830Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.434853Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.441284Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.441326Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.441342Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.441357Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.441372Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.441387Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.441404Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.444281Z node 1 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:48.448187Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.449625Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.449975Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.450052Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 
18446744073709.551615s } 2025-07-08T12:00:48.450089Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.450121Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.450153Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.450521Z node 5 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:48.450619Z node 7 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:48.450696Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.450764Z node 8 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:48.450821Z node 2 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:48.450871Z node 6 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:48.450894Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.450962Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.451046Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.451074Z node 4 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:48.451174Z node 3 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:48.451249Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.451367Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.451378Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.451405Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.451576Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.452316Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:48.477552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:00:48.477575Z node 1 
:IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:48.481720Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T12:00:48.482099Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T12:00:48.482171Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2025-07-08T12:00:48.482317Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T12:00:48.482960Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2025-07-08T12:00:48.483106Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2025-07-08T12:00:48.483137Z node 1 :NODE_BROKER DEBUG: [DB] Using default config. 2025-07-08T12:00:48.483148Z node 1 :NODE_BROKER DEBUG: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-07-08T12:00:48.483154Z node 1 :NODE_BROKER DEBUG: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-07-08T12:00:48.525142Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2025-07-08T12:00:48.525197Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2025-07-08T12:00:48.525205Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2025-07-08T12:00:48.535568Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:584:2207], Recipient [1:548:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:48.535954Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:537:2180], Recipient [1:548:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-07-08T12:00:48.535964Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:48.535977Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-07-08T12:00:48.676968Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:608:2211], Recipient [1:548:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:48.677043Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:548:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 1 } 2025-07-08T12:00:48.677051Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:48.677065Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-07-08T12:00:48.677103Z node 1 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:608:2211] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-07-08T12:00:48.677556Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:613:2212], Recipient [1:548:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:48.677616Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:614:2213], Recipient [1:548:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:48.677643Z node 4 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:613:2212] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-07-08T12:00:48.677657Z 
node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:615:2214], Recipient [1:548:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:48.677686Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:616:2215], Recipient [1:548:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:48.677692Z node 5 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:614:2213] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-07-08T12:00:48.677701Z node 6 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:615:2214] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-07-08T12:00:48.677782Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:617:2216], Recipient [1:548:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:48.677810Z node 2 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:617:2216] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-07-08T12:00:48.677819Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:618:2217], Recipient [1:548:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:48.677825Z node 7 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:616:2215] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-07-08T12:00:48.677841Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [4:109:2072], Recipient [1:613:2212] 2025-07-08T12:00:48.677844Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:48.677853Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.0 ... 
.688027Z node 1 :NODE_BROKER DEBUG: [Dirty] Added node #1027 host4:1001 2025-07-08T12:00:49.688031Z node 1 :NODE_BROKER DEBUG: [DB] Update epoch version in database version=7 2025-07-08T12:00:49.688038Z node 1 :NODE_BROKER DEBUG: [Dirty] Update current epoch version from 6 to 7 2025-07-08T12:00:49.698939Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2025-07-08T12:00:49.698966Z node 1 :NODE_BROKER DEBUG: [Committed] Added node #1027 host4:1001 2025-07-08T12:00:49.698975Z node 1 :NODE_BROKER DEBUG: [Committed] Update current epoch version from 6 to 7 2025-07-08T12:00:49.698980Z node 1 :NODE_BROKER DEBUG: Add node #1027 host4:1001 to epoch cache 2025-07-08T12:00:49.699040Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1027 Host: "host4" Port: 1001 ResolveHost: "host4.yandex.net" Address: "1.2.3.7" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "7" } Expire: 14400025000 Name: "slot-3" } 2025-07-08T12:00:49.699165Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:768:2318], Recipient [1:718:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:49.699190Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:537:2180], Recipient [1:718:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-07-08T12:00:49.699195Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:49.699206Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.7 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-07-08T12:00:49.699355Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:770:2320], Recipient [1:718:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:49.699368Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:537:2180], Recipient [1:718:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 6 } 2025-07-08T12:00:49.699372Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:49.699379Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.7 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-07-08T12:00:49.699430Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:772:2322], Recipient [1:718:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:49.699442Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:537:2180], Recipient [1:718:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-07-08T12:00:49.699446Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:49.699450Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.7 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-07-08T12:00:49.699504Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:774:2324], Recipient [1:718:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:49.699513Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:537:2180], Recipient [1:718:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 5 } 2025-07-08T12:00:49.699517Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:49.699522Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.7 
1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-07-08T12:00:49.699572Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:776:2326], Recipient [1:718:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:49.699583Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:537:2180], Recipient [1:718:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-07-08T12:00:49.699587Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:49.699591Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.7 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-07-08T12:00:49.952683Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435072, Sender [1:718:2282], Recipient [1:718:2282]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2025-07-08T12:00:49.952709Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2025-07-08T12:00:49.952726Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Execute 2025-07-08T12:00:49.952737Z node 1 :NODE_BROKER DEBUG: [DB] Update epoch in database: #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-07-08T12:00:49.952764Z node 1 :NODE_BROKER DEBUG: [Dirty] Move to new epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-07-08T12:00:50.213416Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:802:2333], Recipient [1:718:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:50.213460Z node 1 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:802:2333] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2025-07-08T12:00:50.213523Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:718:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 4 } 2025-07-08T12:00:50.213532Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:50.213537Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 2025-07-08T12:00:50.213643Z node 2 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:50.213662Z node 4 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:50.213719Z node 5 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:50.213730Z node 6 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:50.213810Z node 7 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:50.213821Z node 8 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 
Generation: 0 VersionInfo: } 2025-07-08T12:00:50.213834Z node 3 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:50.233308Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2025-07-08T12:00:50.233339Z node 1 :NODE_BROKER DEBUG: [Committed] Move to new epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-07-08T12:00:50.233368Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T04:00:00.025000Z 2025-07-08T12:00:50.233373Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #4 nodes=4 expired=0 2025-07-08T12:00:50.233419Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-07-08T12:00:50.257227Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:815:2338], Recipient [1:718:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:50.257286Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:537:2180], Recipient [1:718:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-07-08T12:00:50.257293Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:50.257306Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-07-08T12:00:50.257383Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:817:2340], Recipient [1:718:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:50.257395Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:537:2180], Recipient [1:718:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-07-08T12:00:50.257398Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:50.257406Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-07-08T12:00:50.257455Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:819:2342], Recipient [1:718:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:50.257466Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:537:2180], Recipient [1:718:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-07-08T12:00:50.257469Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:50.257474Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-07-08T12:00:50.257525Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:821:2344], Recipient [1:718:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:50.257542Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:537:2180], Recipient [1:718:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 8 } 2025-07-08T12:00:50.257546Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:50.257551Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 
2025-07-08T12:00:50.257599Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:823:2346], Recipient [1:718:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:50.257611Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:537:2180], Recipient [1:718:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-07-08T12:00:50.257614Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:50.257619Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-07-08T12:00:50.257670Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:825:2348], Recipient [1:718:2282]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:50.257679Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:537:2180], Recipient [1:718:2282]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { CachedVersion: 6 } 2025-07-08T12:00:50.257683Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:50.257688Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z >> TNodeBrokerTest::NodeNameExpiration |68.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Init [GOOD] |68.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> ExternalBlobsMultipleChannels::Simple [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns [GOOD] Test command err: 2025-07-08T12:00:50.274201Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001981/r3tmp/tmpZAUm4K/pdisk_1.dat 2025-07-08T12:00:50.400853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:00:50.418214Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:50.450975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:50.451009Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:50.461639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:50.539246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:50.805384Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715658. Ctx: { TraceId: 01jzmyj02efd1tm9190mnktpgz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTNkYmVmN2ItMjU1Mjc0OGItMWI5M2Q4ZTYtZjdkMmQ0YTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T12:00:50.817545Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715659. Ctx: { TraceId: 01jzmyj03r9xnwdgh6nfbcdaxq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzM3YzI0MzYtY2ZiMWMyNzUtY2Q3N2I4ZjEtZTUwZDRhZjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:50.827953Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jzmyj0428bba23zcc841k7nh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzcwN2UxZWMtOGZmYWQzNTAtMjBmMjliZjYtMWJmMTg2YzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:50.838869Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jzmyj04d66378ygfvqn51ds4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWIxY2M4MjktZmI1ZWQ2ZmEtN2QwNDNhYmQtMmU0MGU3ZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:50.849663Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jzmyj04r4kxjn7bf43wsdsyb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWE2ZWQ4Y2QtZGVhMDcyMy1jNzQ1NWUzNS1jYThkZTI2YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:50.860074Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jzmyj052109f3gtqr7xakser, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGJlNDEyZDctNDY4ZDA1NjUtZGQyNjRkMmItNjU3ZWYxNDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:50.870656Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jzmyj05dc2yet7m5a7a0fbx5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjdmNjM0ZDktNjExMDYwMDItZDNkYzRmYzMtZWUxZWYwM2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:50.881366Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jzmyj05qdwmyvs84ybdk6bvk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmE3YTZhNTEtOTUzZjEwYmEtMWEwMGVjNjMtODI0NTc4ZmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:50.891673Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jzmyj062b1y8hp19t5s3vkem, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGVkMjhhZTYtNDIyODQ2YmYtNTliMWNhOC1iMDBiM2U4NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:50.903059Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jzmyj06c46kkwvdjpq908ye7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJkOGNkMTUtZjlkMGU1Y2MtYmE1YWRhNzMtNGI3M2U0MDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:50.913794Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jzmyj06r0dbb8baq9fbq52rs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTU4YmRlNmMtYmFiY2I4MjMtM2FhMDRjMDctYWNiOTM4MDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:50.924464Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01jzmyj073ac10h9ee0gkhjm5b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjEyYWE5YmUtOTExMzY3YzgtZTkwNWVkNTgtNDZjZDg5MTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:50.935354Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jzmyj07dazhkm2tjbvebcj2x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzJjOWExODItMzhjNzczZWYtMjhkNDk0YzgtODAzOTIyNGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:50.946029Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jzmyj07rbc84k7bph4ktv8h0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjYyOTcxMDktZThiMDQwMjAtOTAyMGY2YTMtZTdiMWM5OGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:50.956300Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jzmyj083ee2xvb2edwf0c121, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzNkZTg1MjMtZWUxMWI2ZDQtYWE1MWNjYjgtNjVhMGM3MWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:50.965947Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jzmyj08d41ev1ngjdgz50nbc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmExYzJhNWQtNTJhMmQ3MzktNDM0N2FjNGEtNjYyMzE0OWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:50.975759Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jzmyj08qcafwa9vx3c2v7c9k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzczYjcyMDQtZWU0MTRjODUtMzQyY2I2MWItZmE1YTIzNDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:50.986019Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jzmyj0910shr28pq957w6e9g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDY4YmU2ZDctZmM4NjMwNmEtMWIyNWNiYjAtYTQ5ZDUyMWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:50.995949Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jzmyj09bdyhhtrsha5c8antv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGViMmIxZmUtMTgxMjU4OTQtZjViNmU3ZGUtMzM0ZDBjMDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.006082Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jzmyj09ncjf0mqennj0zgjsr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjllOTU5ZjEtOGY0OTEwOTktZThjOTQzYS00OTQwM2I5, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.013432Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jzmyj09z9jewftajaz6z9knv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWYyMGEzZjktMjAxMzYzZmYtNWJiNmQ2OTYtYmI4MGExZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.028284Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jzmyj0a62jmkp7wftjxabtz2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjZmOWE4ZjAtYmFiMzBjZjItMmY2MWQ3My1lMzUwYzUzOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T12:00:51.038879Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jzmyj0anen92jzbt5rywa95b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdjZmEzMGItY2M0ZDNhYzgtZTc4MDYwZDEtYWFkOWZkNTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.050039Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jzmyj0b05v2zyat1pnmqkaeq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWI3NzViNzgtNTJhMzBhMDctNGM3YTY4ZjgtNGQ2NGIyNzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.061407Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jzmyj0bbeyj7f51v7hd7h12x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzU5YTE4MGQtMjczZjJkM2QtMTRlODhkOWYtYmIzNmJkYjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.071954Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jzmyj0bp5qfv1gg7f9a6bxfv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDNmOWFmNTctODVmNmQwMDgtMWRlM2E1MmEtY2MzMGY4OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.083081Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jzmyj0c1emc27gs30xz7hsq5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2ViMjNmZGQtZDgyZjAwZmQtYTRhZTAwNGUtMjliMGQ2YzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.094047Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jzmyj0cc09520hq9w9rdzjp1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTFhNWY2NzUtNzQwMzdkZGQtNWQxOWQ2ZmMtNjEzMGU4NDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.105968Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715686. Ctx: { TraceId: 01jzmyj0cq1xbnt8aztwy672p4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTZjZWFkOS04MGZhYWIyNC0xZTRlMDRlMy05ODU2ZWNlMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.117858Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715687. Ctx: { TraceId: 01jzmyj0d39b6dpe10pwybhkdy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWU1NDA0NDctNjc4ZWIyNmYtNWEwNjMwZjgtMjc5YjAyNjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.129403Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715688. Ctx: { TraceId: 01jzmyj0df0vjadba2yycn8fpc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDhjZmRkYi0yNDFiNWU3MS0yNDM1ZDU0MC1hOTU2N2Jl, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.140701Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715689. Ctx: { TraceId: 01jzmyj0dt4spbasq44d81e8ta, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTY5YWMzYjktNWU1NzVmMzQtM2UzN2RjYTItMmM4OGE4MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.151619Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715690. 
Ctx: { TraceId: 01jzmyj0e6fcgm9cnmn2a74ebp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGI2NTRhZmEtZjU1NjAxN2UtYWMwMmJkZDctZmI4Njk2YWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not ... myj0sa4gsyjcnymdxms31m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWRjNWJlMzUtMTExOGVmZWMtZTc3NmYwNDctYTk2OTU5ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.517811Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715723. Ctx: { TraceId: 01jzmyj0sm1zeay6yz67pkz5yw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmNlM2M4MjgtZWI5ZDc5NTktNjhmYmQxYjYtYzk1MjNlYmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.528117Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715724. Ctx: { TraceId: 01jzmyj0sz9mtkcqexf7eq92n6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTc1YzAwOTMtODBhN2M3MjEtNzEyMjBiYzItZWI1Y2MzYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.538786Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715725. Ctx: { TraceId: 01jzmyj0t9atpe908cgdtda1r9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTcxZTg5OTEtZTg3YTkxMi1iODBhYjBmMy0zZTI3ZDQyNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.549827Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jzmyj0tm01s6a2ha84v9n3vv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yzg0ZDY0YTYtZjdjYmM2Y2UtNTBlYWZlZTEtMTNhNzM3ZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.560801Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jzmyj0tz7re30w1jdzkbe799, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTYxZWI1ZTgtOTlmNjE3M2MtZjJiZTE4MGMtNDU2YzAyMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.572556Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jzmyj0va543qegrdm1tjj251, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzVmYmFjNWItYzlhMGE3MGEtNzBlYTNkZjctODhjY2Q1ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.583921Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. Ctx: { TraceId: 01jzmyj0vpdp709mvmvkbwyvds, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjdjYWRiMDUtNTI5YTE1M2QtYzJmMDBiZDItNzc0NzY5YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.594753Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jzmyj0w19dmhj069fzzf7ssv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmM0ZGE1ZTMtNTMzMmUyMzYtYzM5Nzk1ZjgtMTIzZDliOTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.605022Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jzmyj0wbavq5qy2y50qxcfve, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjJmNTg0NzctODU5ZGM3MGUtMzBjYjllYS0yNjI0ZWIwMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.615874Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. 
Ctx: { TraceId: 01jzmyj0wpe8s3t1e9jfm0v29s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTdhMDljNWQtNGMxOWQ1MjctYTEyODdkNWQtYTgzY2RlODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.626359Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jzmyj0x1b8xk7hvjcanx3gqt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzhkN2U4MGEtNjY0NmI5NDQtZjQ4YzZiN2UtYTJhZGMwYTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.636528Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jzmyj0xbc45sxea7achr3cfb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDRhMWUyYjEtZjQ4MmEwZGEtOTk1ODI5MWMtNGJkZDI2Mzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.646333Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jzmyj0xn1sbxpdmb4x38tdw2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE4MWYwMTQtOTY5YWRkMzktNzVkOTAwMmUtYWJhZWI2YzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.655854Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jzmyj0xz6mwqaj5xz0zs5s7h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjk3YzQ5MGItYjQ1NDM3YTgtZmYzNTZiMzktMjNmZWY5Y2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.665943Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jzmyj0y9c1nqzzr679jn9apq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjA1ZGY4MmMtYjhiNWQ3ZC1kNWEzZTNkMS02N2I0MjFlOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.675636Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jzmyj0yk3acps3mcp200hwgn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjU1NDgzZjItOTA1MWI0MzMtNWM1NzMzODAtOTJlZWQ5MWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.685658Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. Ctx: { TraceId: 01jzmyj0ywa8sead6y0cpc1rcn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmViNDE4ZS1kOTNlNWYyOC1iNWQ5MjRlNC03ZmM2ZWQyZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.695949Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jzmyj0z6am1cp4z16h7k0ctk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2E0OTNmNmYtY2VlYzRkMTktMjc0MjAzNmUtYWQ2ZGE1Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.706193Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jzmyj0zh09m3cxqhcxyrv7wt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODdkNjZiMTctOTk0MDI0MzgtMmI5YjY4MTMtZDZkYmIxZGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.716418Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jzmyj0zve8z3sbvbgyb2pk50, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjNiMWFjOWMtZjA4MWRmZWQtN2FjZmQ0MmYtNjkxZTJkODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T12:00:51.724903Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01jzmyj1050zjxwadnfrb13pqg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzQyYjIzZjYtZmJjMzk0MmUtNzFkYzMyMTUtNDI0ZTA5NTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.733389Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jzmyj10d5f3jx66ch1nwnrmt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjRjMTlhMDctMmVlNzg2MDItZmMxNmYzYzctYzA4NWJkMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.741813Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jzmyj10p68j2gm1mhefzcsqa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTY1MGFlOWMtZjc4NzZjNjktNWRjZDAwNWItYmQxYTIxMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.751248Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jzmyj10y43s9k86mzn4f6mb1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGI0Zjg3YTctZjcwNDliYTAtMzY4OTdlZjAtMmZjNmVmZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.760523Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jzmyj118b8ehwng9pv4yetb4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmM4ZGVkMGItNDU3NTgwZmItYjM0YzM2NmUtYjI1ZTgwYTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.770078Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jzmyj11hd1s1mgs1ba49etbw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWNkODcyMzctY2ZlMDg1NzQtODZkOGQ2ZDgtZGY4ZmIwZDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.779581Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. Ctx: { TraceId: 01jzmyj11v3eah3mtrq5b7j0y6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjlhMzA2MDEtMzcwM2YxNjMtYzNjMGM2MTQtY2RkNzcyNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.789166Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jzmyj1249krm23k48n2dby40, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmNlODUwOWMtYWJlNmU2YmUtMWQ2MjAwNmMtOGZlZTY2NmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.798781Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jzmyj12e49b1j5zcghax2jz6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2JjNmQyMWYtYmZjYmIxZTMtYWU0Mzk4MTUtOWIzYjA0ZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.808411Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jzmyj12qc7hzy8zay0e41af6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDQ3MWJjMDctZjM0OThkOTYtYzkxODEzZGUtYTJiNTE2NmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.818108Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. 
Ctx: { TraceId: 01jzmyj1317gttcke6fem03k5t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTYyYzc5YjgtNzI3MjRiNDQtNTZiZjg1NmEtMmRlMmYyZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.827866Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jzmyj13bak0me48sdvjbrk6x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2NhNTY4ZjMtYjBkZGMzYS01ZmM4MzA4OC03NzE0NzQzZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.837531Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jzmyj13m7xzcygd5gwsm5k2n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTE1ZWEwYmYtNTNlNDFmYTktNTU3NmIyMDctY2I3ODMxYjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.847296Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jzmyj13y8kb3qqyhq8jp940m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmU3MjYxMDQtMzYwNmFhMTctZjE5NWJhMjAtMzMwYmMzNzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.857310Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jzmyj1484pej5e8dmfx4n9cj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWZiMzgyZi1hNmE3YjIwYy1lNjhmNDYyMi0yMGZmYzM3Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.875092Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jzmyj14n79wgk2s7ex6be02b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2U4MzljYmQtNDc1ODE4YWMtYWYyZTQ4N2EtZDIwMmVjODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root |68.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |68.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest |68.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |68.3%| [LD] {RESULT} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |68.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Basic [GOOD] |68.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction [GOOD] |68.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |68.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::Simple [GOOD] Test command err: 2025-07-08T12:00:50.594472Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001974/r3tmp/tmpXxNVxd/pdisk_1.dat 2025-07-08T12:00:50.730016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:00:50.746956Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:50.781554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:50.781597Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:50.792272Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:50.866506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:51.108828Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715658. Ctx: { TraceId: 01jzmyj0c445mt7qmb40t6ehfa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDRiYTQ1MTktZjg2ZGNiNzUtNDVmY2IyN2QtYTM1ZDBmYjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.119191Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715659. Ctx: { TraceId: 01jzmyj0d6544nve8d6na898e9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDAxZWIzZjMtODhhMWMzOTUtZDY2MWI4OTYtOTM2YTk2Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.129093Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jzmyj0dg8vzgs2qmryeg747p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDBkNGIzOS00ZWYyNmJkZC02OTM3Y2ExNy0yMDE2ZGRlOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.138513Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. 
Ctx: { TraceId: 01jzmyj0dt742h9rehvv1x0qy9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWJlNjJmYjItMTQxOTM5MzMtMjliODkwMzMtYjYwNDlhMjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.147362Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jzmyj0e31pfbjpw07082p1xt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmM2NDM2NGItZWQ0ZGQ3NjctOGJkMzE4MDMtZGJiNmZlNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.156981Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jzmyj0ec2yp7cam3ycrbxzb7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWFlMDE3OWEtZmY5ZTNhNDUtNzhhNzMwMDYtODk5Y2EzMDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.165921Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jzmyj0epbzxx6je8x81wzzsp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTNhOWZlMzUtMTI0YWVjZmYtNDllNWFkMDktZmZlYjZkOGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.175611Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jzmyj0ey9j6pmv82148tb1rd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWE4MGNjYTAtMWRkZDc3ODMtNmIyZTQwNmYtOGU1YWIzNmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.185657Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jzmyj0f8c2g96rsxmzjbx2pf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjEzZjhmYTItMmE1YTcxN2MtNGNmZDUzOC1lMWE1YWQ5Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.195764Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jzmyj0fjbtyxvx5e63fzqavt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODA0MzJjMjQtOGZmMTA1YTUtMzBjZjMxNGMtY2FmNTIxZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.205392Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jzmyj0fw32eyjap1rrh9bj2y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njg5YzQ0MzUtMmY4MWY5MWEtNWEwMTI5MmItMzM5ZWVlMjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.214756Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jzmyj0g697a0vcyvx52k8d7b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTFjYWVhNmEtYzI2OTE1ODYtYzZiMzg3OTMtZGVlNDQzMjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.224502Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jzmyj0gf3daszawaj4281z2f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTFjNjhhOTgtYzg3MWJhMDAtNGVmNTg3NmUtMzcyMGQxMWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.234741Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jzmyj0gs4mcpz11kyyr9x7t9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODliOTRiZTctNzhhNzczNTMtMjY5OWJkNzktMTJlMDIzNTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T12:00:51.244912Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jzmyj0h314j7qwhas3e06zgp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWE1MDY1ZmMtMTA2Mzk5NDYtMWE1MzAxNTQtYWNlMWJlMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.271645Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jzmyj0hp8g4sg5bejnzjykyt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWFjODk2NWEtZjU1MDQ2YTQtOGUwODIxNi1lZDczMWI1, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.282115Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jzmyj0j87asrpjpt2qeg9530, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDY4YmI1N2QtNWY3ZmZlMDEtMTE2M2YxYzAtZDM1MzdlYTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.292362Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jzmyj0jke6pkdjr1fx5gbd67, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzBmNjFlYi0xNzUyZTVjNi00ZTk1ZGE0NC1iMjNlNzA2ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.302618Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jzmyj0jx09y6vgg6r9a0mbvn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmYwYjdjZTgtYzAzMWEwYjYtNzA2ZjEwYzEtY2QyNjJkYjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.312878Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jzmyj0k7eqmwvjagtqdhgyhh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWQ0ODc4MmYtZDc2NTEyZTUtZGYyNDczMTMtZTg1MTcyYjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.323197Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jzmyj0kj8f9vddzwew49zdh7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWNiNGI3NDUtOGYyNzViYjYtYjhhZDkxNzUtMWVmY2RlYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.333393Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jzmyj0kw4a4sk0bx3vxcvqhh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGM0NGU4YzAtYmY1MWEyYzgtMjcwYzgwNjgtYzk3YjFiYTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.343495Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jzmyj0m653xdr3zdhmn7mm3t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzMyZDg1OTMtYzk5NWYwOGQtZTNmY2I3MGMtNzQwYWE2MjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.353332Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jzmyj0mgd3ta8fp9kj69bpjm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzlmZTBkNi02ODU2ZTEyMC0yY2YwNTFkZC05ZDg1ODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.362849Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. 
Ctx: { TraceId: 01jzmyj0mt596jpp1t4m2ydavd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2UyYTM2MzgtZDQzNjI5N2QtMWU0NjFiZTctZTQ5MmJmNGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.371864Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jzmyj0n3brb2kfq5drv6x1pd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWYyMmYzNmQtZGFiZDA3OWEtNDc5NzlhMGEtYmUwODRhMzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.381367Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jzmyj0nccm7y5xmas210ty0x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmYyMzNlZDYtYzcwMzliNzktYWM1ODFjZTItZjM3NDg1NDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.390649Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jzmyj0np7xr5xg5rz6rhv0dh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzcxMTNjNDQtZGQ5MjNiN2UtMzlkN2JmNGUtN2JlZWYwZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.399963Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715686. Ctx: { TraceId: 01jzmyj0nze7rthzc29ygxq9vk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTkwYTcyMDktYTA3MTg5ODItNDc4YzEyNDItYmYyNjU1MGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.407118Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715687. Ctx: { TraceId: 01jzmyj0p9ajmf9kz7eyxjdmjb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDE1ZmE3NjUtOWMzNjRkNjMtNzIzMGE5MTEtMTZjYThhMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.416100Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715688. Ctx: { TraceId: 01jzmyj0pf2krwn3r0ha42t8kw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODljODFhMzYtOGU0MDM0NmYtZTVjMTY4YTUtZGIwNjBkYTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.426355Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715689. Ctx: { TraceId: 01jzmyj0ps2rf5epvph84vt9bt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjgyNWEyNWMtOTcyOGZhMjgtMmNkZTEzYi0xOGZlYWVmNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.437168Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715690. Ctx: { TraceId: 01jzmyj0q338y6xy7mar03rhvw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmNlMjAxZDQtMzczMzI3ZmQtZGM4NzZiNzQtZGQyMjAyODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not ... ecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.769374Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715723. Ctx: { TraceId: 01jzmyj11h7xnw8115f7gmyj1r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjQ0ZTFiMTgtNGQ4MGNjNzEtNTVjMmU0ZTUtMzgwY2ZmMDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.778851Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715724. Ctx: { TraceId: 01jzmyj11t7v523p6ead09b8gy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY1YzJhZTEtZjEzNzRlZTUtYmM3MDg3MDItOTc3M2U5NmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T12:00:51.788635Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715725. Ctx: { TraceId: 01jzmyj124aba2y3v7vywp74d2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTRhNTU2MTMtN2QyOTFiZTItYTdmMTcxZjMtNTY3MGM1N2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.803278Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jzmyj12d5yws7a972ba3wrm7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmRlMWMzNTUtM2NjZWIwNWEtMWIyMWM4N2ItN2ExNDE5MWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.813989Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jzmyj12wcegvzyfgge2e3h78, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQzYWJmODktYWVlOTQxMWEtNGFhYTg0ZTgtMmIwNzI4ZDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.824779Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jzmyj137e47907b568b808tf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTg3MDhkY2QtZTQxY2FhZS1jMGQzZjRmYi1kMDA3ZmRiOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.834478Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. Ctx: { TraceId: 01jzmyj13h6jcqsbm4hj2qq547, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDRhZDY4N2EtMmJhYzUxNjItZTIyYmYzNTgtNzhmZmIzN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.844585Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jzmyj13vc25sz91ec8rparrp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjhiZmExMDUtZTAyM2QyMmItZDRkMjQ4OGItYzgxNGFjNDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.854119Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jzmyj1456j01y7by8hcyajn2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDdlZjJhZDEtYmZkOTBhYmQtMTQ0ODBiMWEtMzQ0OTZjYmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.864147Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jzmyj14fdq9g12v3re2edmc4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWJhZDA2ODgtMjc3OTg2ZDAtNmMzYzQzODctM2VkY2U3ZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.873875Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jzmyj14sd8kh2ex0rw9sjkge, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjVjYmYwMGEtYzUzY2QyN2UtZTc1NTMyNDctOTUxODRmNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.884617Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jzmyj1536c0ayxmcffnj5qdw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTA5OWU1OWYtNWI3YWFiNTAtN2I4NmY1YmItZGY3NzI0Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.894745Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. 
Ctx: { TraceId: 01jzmyj15db78tcdrt3qjkb6bh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTRjNzk0NWMtNzQ0MGI5YjQtYWEwNGU0YWYtZTdkODAzODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.905451Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jzmyj15r82k15jh1jkbxn61n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZThhM2MwNmEtZDU4YWE1ODQtNTRhOWZkMGEtNWE2ZmQ5ZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.915431Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jzmyj162b5ygd98we850jbs1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTg3ZTk3ZmYtZDE2YjE2YWUtZGVjNTA1MjEtOTU1MDBmNzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.925560Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jzmyj16c0g8t4cr5khskb52x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWVmMTU5ZWMtYzI0NzIwZWMtZjFiNzgwOGQtZjU3ODY3NTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.936123Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. Ctx: { TraceId: 01jzmyj16pemwn2csg8c81r3d9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWNmNTlhNjUtNzI5M2QyOGUtNmI2YzliZWEtYjE0MDM3NGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.946135Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jzmyj17148qkfdg9kxek71vp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTA2MmIxNTktYTVjYzA0ZmQtMzM2Yzc0NDQtODQ4YjU3NmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.956255Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jzmyj17bavvf79zc0n0serfy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzkzNGRkNTYtNGM5NjVlZDUtMjI1YjViMzMtZDNmYzVmZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.966589Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jzmyj17n6k3sp2r4dsdvmtn9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWNhYzBiYTUtYzY5NzU2ZDEtMWMyNzg5MDYtODJmODYzNGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.976893Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01jzmyj17z0qg83mtaqgp6561b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWU2YmRlOGEtMjQ1ZDFkY2EtNzViOTlkYjEtZjZhZGE4Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.987574Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jzmyj18a9jqwx4tn8w4rq3n1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTYyMzZmZmEtN2Q1MmQxZDAtMjkyMGEwYzAtODk2YWM4M2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.997931Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jzmyj18mbxkdhzt8xkrpd8dr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGY1MTY0ZGUtZmE5NzllYzYtNzIzMmI1MTktZjg4YjNjMzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T12:00:52.008275Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jzmyj18z1hhrt6mdy8zjj525, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjdlYWE2NTEtOGZhMTMwYTAtNjgwZGU4MTQtZjMwYzdkNzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.018894Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jzmyj1993rxze812xm4k0ms6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWY2MGM3YzQtZjdkNzVlNDktOTRlMjYyZDctZmM0ZGE3YWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.029297Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jzmyj19m5egdg7zeqczxxjpn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWUxOGMwN2UtNDU2MmVlODItNDVmZTA4YTUtZWUxM2JmM2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.040125Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. Ctx: { TraceId: 01jzmyj19y0rchrz6ywtapbtbh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmJkZDQ3Yy1lNDYxZDZmNC1hNjI0YmU5OS1mMjU4YzU3Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.050723Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jzmyj1a9c5058q2ph12dsbg0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTZiMjk4OTctNjQxYTMxMzYtYzAxN2ZhMjAtZTI1ZTVhZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.061198Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jzmyj1ak5fw9z3a6r5e114aj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTI1OTdmNWMtYjkwNzBlYTctNGUwZjVhMmMtZjVhMGI1ZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.071106Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jzmyj1ay4gxmy8ycthvnaqbf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzg1OTgxNTMtZjIwMGUxZjUtODBjNTAyZGUtMmI4MDgwYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.081442Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jzmyj1b87ppq2cgzwk42swgz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjVjYjk5MmEtZTVkN2UyZDMtOTcxY2NkNjQtNzIwYjVkODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.091772Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jzmyj1bje19pdsfapefvgnbb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmZhMmM1NWYtNmM5MGNhYWQtYTUyZWE3ZmUtNWI0ZjE0MzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.102499Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jzmyj1bxb2a4sne6d95yk4rf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQ4ZDY5MzUtYjNjOTA4YjYtNWE2YzI3MjQtNzA1NDY4M2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.113501Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. 
Ctx: { TraceId: 01jzmyj1c731graxnvqty63ykm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODRiYWIwZmItZDhmYTg3MTQtOGFlZWRlYjctNzQxNzMxNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.123804Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jzmyj1cjayqjkcqq125bm8gf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTYxMmIyOGUtYTdkZTJkMjYtZDYwN2E4YjgtY2MwZTAxNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.210207Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:00:52.244413Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jzmyj1g6bzysf24tk7g4vrmr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDc3MTRhMi0xODQyMzc2ZC01ZjEzNDVhMy1mODVkN2EzMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root |68.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink [GOOD] Test command err: 2025-07-08T12:00:37.586075Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001a1f/r3tmp/tmpqGT4Xc/pdisk_1.dat 2025-07-08T12:00:37.701094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:00:37.721342Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:37.765804Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YTYzNTI3YTktNmJiMGEyNTAtOTdmMDE3MC01OTRkODhkOA==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YTYzNTI3YTktNmJiMGEyNTAtOTdmMDE3MC01OTRkODhkOA== 2025-07-08T12:00:37.765975Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YTYzNTI3YTktNmJiMGEyNTAtOTdmMDE3MC01OTRkODhkOA==, ActorId: [1:571:2492], ActorState: unknown state, session actor bootstrapped 2025-07-08T12:00:37.766094Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YTYzNTI3YTktNmJiMGEyNTAtOTdmMDE3MC01OTRkODhkOA==, ActorId: [1:571:2492], ActorState: ReadyState, TraceId: 01jzmyhkc671yp03e6qqxwqand, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: CREATE TABLE `/Root/table1` (key int, value int, PRIMARY KEY (key)); rpcActor: [0:0:0] database: databaseId: /Root pool id: 2025-07-08T12:00:37.829143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:37.829182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:37.829738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:37.841945Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:37.855985Z 
node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:612:2522], Recipient [1:617:2525]: NKikimr::TEvTablet::TEvBoot 2025-07-08T12:00:37.856237Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:612:2522], Recipient [1:617:2525]: NKikimr::TEvTablet::TEvRestored 2025-07-08T12:00:37.856325Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:617:2525] 2025-07-08T12:00:37.856377Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:37.867958Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:612:2522], Recipient [1:617:2525]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T12:00:37.868149Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:37.868175Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:37.868327Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T12:00:37.868336Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T12:00:37.868343Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T12:00:37.868393Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:37.868414Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:37.868425Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:631:2525] in generation 1 2025-07-08T12:00:37.868497Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:37.880031Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T12:00:37.880123Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:37.880152Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:633:2534] 2025-07-08T12:00:37.880158Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:37.880163Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T12:00:37.880169Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:37.880252Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:617:2525], Recipient [1:617:2525]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-07-08T12:00:37.880260Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-07-08T12:00:37.880326Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T12:00:37.880349Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T12:00:37.880369Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:37.880376Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:37.880383Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-07-08T12:00:37.880388Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-07-08T12:00:37.880392Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 
72075186224037888 2025-07-08T12:00:37.880399Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:37.880404Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:37.915585Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:636:2536], Recipient [1:617:2525]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:37.915612Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:37.915622Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:608:2520], serverId# [1:636:2536], sessionId# [0:0:0] 2025-07-08T12:00:37.915637Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:361:2356], Recipient [1:636:2536] 2025-07-08T12:00:37.915642Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-07-08T12:00:37.915689Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:37.915767Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-07-08T12:00:37.915779Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T12:00:37.915807Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T12:00:37.915816Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-07-08T12:00:37.915821Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-07-08T12:00:37.915826Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-07-08T12:00:37.915830Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-07-08T12:00:37.915879Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-07-08T12:00:37.915884Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-07-08T12:00:37.915887Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-07-08T12:00:37.915891Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-07-08T12:00:37.915902Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-07-08T12:00:37.915905Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-07-08T12:00:37.915909Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-07-08T12:00:37.915912Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-07-08T12:00:37.915917Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-07-08T12:00:37.916122Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:37.916130Z node 1 :TX_DATASHARD TRACE: Complete execution 
for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-07-08T12:00:37.916133Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-07-08T12:00:37.916143Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-07-08T12:00:37.916154Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T12:00:37.916595Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:637:2537], Recipient [1:617:2525]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-07-08T12:00:37.916607Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:37.962254Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:652:2546], Recipient [1:617:2525]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:37.962276Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:37.962285Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:651:2545], serverId# [1:652:2546], sessionId# [0:0:0] 2025-07-08T12:00:37.962583Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:522:2449], Recipient [1:617:2525]: {TEvPlanStep step# 300 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-07-08T12:00:37.962591Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-07-08T12:00:37.962615Z node 1 :TX_DATASHARD TRACE: Trying to execute [300:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-07-08T12:00:37.962624Z node 1 :TX_DATASHARD TRACE: Execution status for [300:281474976715657] at 72075186224037888 is Executed 2025-07-08T12:00:37.962629Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [300:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-07-08T12:00:37.962635Z node 1 :TX_DATASHARD TRACE: Add [300:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-07-08T12:00:37.963371Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 300 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 300 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-07-08T12:00:37.963387Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit h ... 
00:52.046607Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:281474976715663] at 72075186224037888 has finished 2025-07-08T12:00:52.046745Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:52.046754Z node 13 :TX_DATASHARD TRACE: Complete execution for [0:281474976715663] at 72075186224037888 on unit FinishPropose 2025-07-08T12:00:52.046760Z node 13 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715663 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: LOCKS_BROKEN 2025-07-08T12:00:52.046774Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:52.046874Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=NThjZDkwOTQtZjAyMmQwMDMtMTdmZGVkYmItMmM4MWI3MDg=, ActorId: [13:694:2583], ActorState: ExecuteState, TraceId: 01jzmyj1a41v0sk2ckyb458kdj, Create QueryResponse for error on request, msg: 2025-07-08T12:00:52.047045Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jzmyj1a41v0sk2ckyb458kdj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NThjZDkwOTQtZjAyMmQwMDMtMTdmZGVkYmItMmM4MWI3MDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.047110Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [13:798:2583], Recipient [13:752:2624]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 798 RawX2: 55834577431 } TxBody: " \0018\001j3\010\001\032\'\n#\t\213\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\000 \003\"\006\020\0020\000@\n\220\001\000" TxId: 281474976715664 ExecLevel: 0 Flags: 8 2025-07-08T12:00:52.047116Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-07-08T12:00:52.047140Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [13:752:2624], Recipient [13:752:2624]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-07-08T12:00:52.047144Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-07-08T12:00:52.047152Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:52.047174Z node 13 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715659, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-07-08T12:00:52.047183Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit CheckDataTx 2025-07-08T12:00:52.047190Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2025-07-08T12:00:52.047194Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CheckDataTx 2025-07-08T12:00:52.047198Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-07-08T12:00:52.047201Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit BuildAndWaitDependencies 2025-07-08T12:00:52.047207Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v301/0 IncompleteEdge# v{min} UnprotectedReadEdge# v300/18446744073709551615 ImmediateWriteEdge# v301/18446744073709551615 
ImmediateWriteEdgeReplied# v301/18446744073709551615 2025-07-08T12:00:52.047214Z node 13 :TX_DATASHARD TRACE: Activated operation [0:281474976715664] at 72075186224037888 2025-07-08T12:00:52.047218Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2025-07-08T12:00:52.047221Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-07-08T12:00:52.047225Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-07-08T12:00:52.047228Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit ExecuteKqpDataTx 2025-07-08T12:00:52.047238Z node 13 :TX_DATASHARD TRACE: Operation [0:281474976715664] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193448 2025-07-08T12:00:52.047247Z node 13 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715659 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: false 2025-07-08T12:00:52.047258Z node 13 :TX_DATASHARD TRACE: add locks to result: 0 2025-07-08T12:00:52.047268Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2025-07-08T12:00:52.047272Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-07-08T12:00:52.047275Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit FinishPropose 2025-07-08T12:00:52.047279Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit FinishPropose 2025-07-08T12:00:52.047284Z node 13 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715664 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-07-08T12:00:52.047295Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is DelayComplete 2025-07-08T12:00:52.047299Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit FinishPropose 2025-07-08T12:00:52.047302Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit CompletedOperations 2025-07-08T12:00:52.047306Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit CompletedOperations 2025-07-08T12:00:52.047312Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2025-07-08T12:00:52.047315Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CompletedOperations 2025-07-08T12:00:52.047318Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:281474976715664] at 72075186224037888 has finished 2025-07-08T12:00:52.047327Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:52.047331Z node 13 :TX_DATASHARD TRACE: Complete execution for [0:281474976715664] at 72075186224037888 on unit FinishPropose 2025-07-08T12:00:52.047336Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:52.047522Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [13:62:2109], Recipient [13:752:2624]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715659 LockNode: 
13 Status: STATUS_NOT_FOUND 2025-07-08T12:00:52.062818Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jzmyj1agdw2msfnzzvf58bgm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MzhjNDVkMjUtNmU2ZTU1Y2QtNzg1M2UzZDItOGY5NzJmOTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.063207Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:819:2669], Recipient [13:752:2624]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-07-08T12:00:52.063242Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-07-08T12:00:52.063252Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v301/0 IncompleteEdge# v{min} UnprotectedReadEdge# v300/18446744073709551615 ImmediateWriteEdge# v301/18446744073709551615 ImmediateWriteEdgeReplied# v301/18446744073709551615 2025-07-08T12:00:52.063260Z node 13 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v301/18446744073709551615 2025-07-08T12:00:52.063270Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-07-08T12:00:52.063285Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-07-08T12:00:52.063289Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2025-07-08T12:00:52.063298Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-07-08T12:00:52.063302Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-07-08T12:00:52.063313Z node 13 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2025-07-08T12:00:52.063318Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-07-08T12:00:52.063321Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-07-08T12:00:52.063325Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-07-08T12:00:52.063328Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-07-08T12:00:52.063342Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-07-08T12:00:52.063388Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[13:819:2669], 0} after executionsCount# 1 2025-07-08T12:00:52.063395Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:819:2669], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-07-08T12:00:52.063409Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:819:2669], 0} finished in read 2025-07-08T12:00:52.063418Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-07-08T12:00:52.063422Z node 13 :TX_DATASHARD TRACE: Advance execution plan for 
[0:4] at 72075186224037888 executing on unit ExecuteRead 2025-07-08T12:00:52.063425Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-07-08T12:00:52.063429Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2025-07-08T12:00:52.063438Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-07-08T12:00:52.063441Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-07-08T12:00:52.063445Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037888 has finished 2025-07-08T12:00:52.063449Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-07-08T12:00:52.063466Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-07-08T12:00:52.063628Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [13:819:2669], Recipient [13:752:2624]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-07-08T12:00:52.063636Z node 13 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 22 } } >> ExternalBlobsMultipleChannels::SingleChannel [GOOD] >> TNodeBrokerTest::NodeNameWithDifferentTenants [GOOD] >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx2 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction [GOOD] Test command err: 2025-07-08T12:00:50.730091Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001973/r3tmp/tmpQXioWv/pdisk_1.dat 2025-07-08T12:00:50.849988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:00:50.866509Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:50.898562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:50.898599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:50.909185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:50.985363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:51.223293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480 2025-07-08T12:00:51.322352Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:00:51.511790Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715658. 
Ctx: { TraceId: 01jzmyj0rf8mtd11v1x5p600s3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGVmNmNhOTEtOTkwMGI4YTEtNGNkNDUxN2ItM2Y5NDE3NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.522800Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715659. Ctx: { TraceId: 01jzmyj0st5mbtsywn853kezfc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWIxZmZiYjYtZDE5YjkxODYtODE3MjZhZjgtMjQ4NzgzOWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.532132Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jzmyj0t43c6e238gbmdrnx3x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjA3YmE1MmUtM2FjNWU0NTktNDc0NTg5NmQtYjFjYjA0MmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.545055Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jzmyj0td8h6hs4ph2ytapkh2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjkzZjg2NzMtNjQ2ZTQ0ZGQtNGM0MWU0ZGQtYzZjYWYwNDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.554234Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jzmyj0tt8wnajng0fcv4nygv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGJmMjAyYTUtNjIxN2NmZDMtNTc4ZThmNzMtNDc1ZjQ4MzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.563797Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jzmyj0v39w4jz3e0qgnkywpe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzA5ZmY5ZTgtNmQ4NzRmMTUtMTgzMmQ4ZmUtZjgyYTlkOTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.573515Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jzmyj0vcbq9r9vn06v9b14yd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjUyMmZjZTYtYWVmNTY5ZGQtN2Q5ODczNTktMjBkODM1ODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.583860Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jzmyj0vp3n0fx85x6sw1xpqc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTU3YmE3NjYtN2YxOGNkNWItNDZlYjQ3NzMtZmQwZDlmYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.594729Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jzmyj0w1d2e0xf4na0rd35xy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzQ1MDQwMmYtNTkzZmMwZDMtOWRjYThhNzUtZTQ1NjgzYTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.605021Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jzmyj0wb7c0cydrqfr5b2mtp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDlhZDA0OGQtYTFlYmMzNjQtM2ZjN2FmNDUtZTlmZTdjYmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.615874Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jzmyj0wpd4vrpermg3861mdz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDc1YjhlOWUtMjc3MDdkMGUtZDk1MWRhZTYtYjA3NWZhNTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T12:00:51.626331Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jzmyj0x1eg1m6m78qtn24s06, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Zjk0NTNiZDEtOTc2NTE0M2MtZjliNzFjYzQtNmZhYWI4YmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.636344Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jzmyj0xb2gd1754w7636148c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTNmODkxZS1mMDVlYTRlZi1lOGM3NTQ0My00N2VkNDRmZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.645938Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jzmyj0xnbfj4r0w9vfsydxpy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjkxMGViZjgtMWE2MWU1ZDQtNGEyY2ExNjgtZWU4ZTE4NGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.655851Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jzmyj0xz7vrwag03tw23acwv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzA5OWRlNS1lNDFjMDU1MC00MzhmZmI1ZS02ZTBlOTY0, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.665915Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jzmyj0y91pnccj74g76w5am5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjg5ZGU0NTctOWQwMDRiNjEtZGJlYjllMTMtNDg0ZThjMmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.675737Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jzmyj0yk3d4w3z6rezkbcpt6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTJmMzU3MjktOTRjYmU0NzctMzU5ZDEyOTYtOGU5NDNjYWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.685574Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jzmyj0ywamxcsssygp4rpx36, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjhhYjAzOGYtODA5ZTVhYWYtOGI0MTYzNDAtMzhhMmU3ZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.695372Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jzmyj0z69s41f43b3jsfxxte, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTczZjlhMjctYTkxMjlkNWEtZDg2Yzc1Y2MtYjVkYWJjNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.704903Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jzmyj0zgenqw555btp3rcmwf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2U3YzIzM2MtNTBiMGIzOWMtOWY0NzNmNjctMTA2MmM2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.713474Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jzmyj0zt0gnnfde86njqesxj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDY5YjA1ZDUtNTJjZTU1MmQtODZhNDU4NTctNGI1NmU0NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.722925Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. 
Ctx: { TraceId: 01jzmyj10222p9sgnz8ya17cq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDk3MzkwZGMtYjk2MGRjMzEtYWZmMzk0MDUtNmU5MTRmY2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.732105Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jzmyj10cb0e5v835q8he00q5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzhlZDcyY2UtOGQzZDRjYWItZmVlMTU1MTgtN2E4ZmUwNmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.741695Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jzmyj10nenkchkqtyebmk8hd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjQ4MzRhOTctNjRjOWYwZjktOGM1NjVjMDgtOWQ0YTVmMWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.750960Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jzmyj10y4wkwm2syh0xp9tjt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTNmOWZkMTItZjAyNTVkZGUtODMwNGI5ZmItNDA0MjI2OTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.760437Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jzmyj1184d6k2dqsjggzs358, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjQwODY0ZjctZGMzOWJlZGItMzI3Mjk2YjUtMWMyMDc3MWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.769625Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jzmyj11h8afbdj4sbma57jfk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmVkMThjY2YtNWM2NjVkZTItNWJmNDkzZmQtODc4Y2UyODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.778896Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jzmyj11t835r1n9qc7mfs8yx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGUxZjhkMmQtZjc3OGJjMTctMjRlNDFkYjUtODUyNWFlNzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.788635Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715686. Ctx: { TraceId: 01jzmyj124d0fhv3gt2syqhn3t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2Y4ZjFkOWYtOTFiZjBjOTEtYjc2NzIwZmItZGFhZWZmNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.798121Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715687. Ctx: { TraceId: 01jzmyj12d0f60qbx4ygm1q0a6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTgyNTlmMmYtNTYyMDZjMzYtZDgyYTc5MGMtMmY1MjlkYTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.807938Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715688. Ctx: { TraceId: 01jzmyj12q0rp81x4fbtdh567c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdmMWYzYmMtZjU0OGUzYy0xMmIyNDdmLTNmMTI1YjFh, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.817665Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715689. Ctx: { TraceId: 01jzmyj1314g94frfnszxt25wz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDk2YjU4NGEtZTZkZDE0My1kMTk2YTVlMy1iMTYwMDhkZQ==, CurrentExecutionId: , CustomerSu ... 
Id: 01jzmyj1cp4njnq46dfp6sey33, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODIxMDBjMTUtODJmNWU3ODctY2RkYWExZTUtOTIwMzQ4OWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.135849Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715723. Ctx: { TraceId: 01jzmyj1czahbwaphdrvf3fct7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzQ2OWM1NTEtNjJhYjQ3ZGUtNzc0MDViYmYtYzg0ZTgwNDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.144568Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715724. Ctx: { TraceId: 01jzmyj1d95q84s4h2ftd8vb94, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDQ5MjJmNzYtYmUyMjJlMGQtMTlhMTgyNjUtOTllMzNkNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.152199Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715725. Ctx: { TraceId: 01jzmyj1dh3zg3q8arjavkqbts, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODAxZDIzZjgtZDc5NjQ5Y2ItNDBlODAyZGQtMjVmN2M0MWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.160654Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jzmyj1ds7rzbqs1td07wsxvy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2ZmYWI0ZWUtMmM3MTFkN2YtOGZlODAxMTUtNDUxZWM3NTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.168994Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jzmyj1e1ehw4qn296pemtsg9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmFjODljM2EtNWM4M2ExMC01YjU1ZWFhOS1mNmVmMWM4Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.177513Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jzmyj1e9cx5e2gpv5twtxb9s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGNiZTY5ZmYtNTBhYjJhNzgtN2UyZmJmNi00MzU4MzdhZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.186510Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. Ctx: { TraceId: 01jzmyj1ej5154tacptt2n8377, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTE2ZTVmMzQtNmMyNDM1MzQtNWQ0NTI0YWEtOTVjMmNjNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.196371Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jzmyj1evanc7qzfjvmaq2ewe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2I1ZDUwYjItOWNkY2JjLTc3ZGNiMTZmLTRiNjFmZDQx, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.206598Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jzmyj1f57rr4w2bcqzwnqznp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODM5ZDE0ODAtNzExMzE4M2YtOGQ1NmY1ZTMtYzlhNDU0ZGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.216477Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jzmyj1ffbjrz49ssmpraf4dv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YThhMWNiNjgtOWM0NmMzMjUtMTk3NWY0YS1mMTU3ZDMwYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T12:00:52.225611Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jzmyj1fs269jwvq09kyshyqb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzUzNmQ2NmMtOWNmMWViNzAtOTE2Yjc2YWQtMjk5NWIzZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.234610Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jzmyj1g2a2395wvdk9tm4phx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTRmNjM2ZmYtMzAwNTBjMDctYTQ0ZGYzMGUtYjk3YjA4YjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.244619Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jzmyj1gb0f7xemq27pjknk45, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTNmMmNjYmMtN2I5M2Q2MjQtYmYzZGZiNTEtZjFmYWYwZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.255661Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jzmyj1gnbxd8n7c752zwcawa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTkxYTczMTgtNjViYWM0YjQtMmQzMDg5ZTYtYjg1OGE1MjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.267102Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jzmyj1h1cn4jw71re0dnhq8y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTc3YTQzODgtYjZlMGUyOGMtZDI5MTA4ZGQtMmRjNjUzN2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.277887Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jzmyj1hcf0pxw6by7x553gkj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTkyMjcyYjctYWViZWEwMGQtNTNmNDNjNmUtYjVmYTQ3N2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.288750Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. Ctx: { TraceId: 01jzmyj1hqb1achf1782yxj8fa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWEzZjVhNGQtOTVjYmM4YmYtY2EyMGM4YzUtOWRiNDg3OGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.299742Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jzmyj1j24468a703n3sk2znk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzJjMGRhYjAtM2E4OWE5ODgtNzJkNTA3NDctNDcxOWIwYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.310667Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jzmyj1jd23zs657t932a65s4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTc0NDY0ZmQtNDQ2ODkyOWUtN2FmZGY2NzAtOTY5ZmMzYjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.321698Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jzmyj1jr1cb57k45evvvsyw1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWQ5ZmUwY2YtMTRhODg1OS1hNjczZjFiZi00NTBhNjg3MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.331704Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. 
Ctx: { TraceId: 01jzmyj1k2c0hgdr7pcz8vsk0w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmNlODUzMDUtMzEzNjY5NDctZGY1ZjBkMWItOTgxMmM0OWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.341327Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jzmyj1kcbgcqqa04awjcggyg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTM5NmI3YzUtZjU4MzlhODAtNTFlOWQzMTUtODk1NWE5NWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.351236Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jzmyj1kp0nf10qc144hnp857, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODhhNzE0MTctODg0NzE1MDItNjdkM2M4ZmEtMzBjOTU5NjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.360813Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jzmyj1m097dt1t4h5dy1pfr1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTNiMGUxYjQtNDMxNGY5YTYtZGY2MjBlZTYtYjk4OTkwZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.370838Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jzmyj1madtc8zx2rr44m4jxw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWE1ZWM4ODMtOGIxNmM5MjEtMjYxMjA1MjctYzBlNTc5ZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.381198Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jzmyj1mm0gdeq20dveqc3hc2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDU2MjQ5OWUtOTZiYzUxZi03YzRkODBmZS1jNzkwZjQyNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.391710Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. Ctx: { TraceId: 01jzmyj1my9f4dptxh58j5bqfs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTE0YzcwZDMtZDU2Yzk3ODMtOGVjOWNkMDEtNmQ4OGI0NzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.401940Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jzmyj1n87qnxjmghb2re8cnn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yjc1MTMzN2QtODM4Yjg0NzYtM2U1M2ItNGNmMzgyMWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.412264Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jzmyj1nkf23weppfaa5v6xpk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjMxZTE3YzYtNWNiODQ3ZDEtYjI1OWRiOGYtZTJkNzVmZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.422427Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jzmyj1nx28361tr912x0jt5r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQwYzViODctZjgxNzE4YjktZjVmODRkOGQtZDE1MTQzMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.432549Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jzmyj1p7414xk9m3z5fgg3t7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzgxNmM5NmQtNmFlYjhiMjctOWZhNTkwNC0xZmY3MzlkYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T12:00:52.441267Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jzmyj1phcx6znbz8zb77wfnr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTRjNjMzMi01YmVmNDJjOC0xNGQ4ODAyNC0xYjE1OWZlYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.451148Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jzmyj1pt12de77ngvjgen3pd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGU5ZTVhMDAtZTZlOTRkNjgtOWNlYzdiZjktMTk5ZTA0MDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.460896Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jzmyj1q44gq52e08xjvdakhb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2UzMTdiMDctNTY4ODUzY2QtYWUxNGNlOC0yZWExZTk0MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.470680Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jzmyj1qe4xer5rxc8cz7209h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGFmZDVkYmUtZmE3YzMyNmMtZGFmNjBmNjMtYWMwZDhmOWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.586833Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jzmyj1tm4j29xt665jp3e4bt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRjM2E3NmUtNGNkZjBmZmItYWQyMWZhOTQtMmQzMWVjYzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root >> BasicUsage::WriteSessionWriteInHandlers ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::SingleChannel [GOOD] Test command err: 2025-07-08T12:00:51.371986Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001946/r3tmp/tmpJBGn9d/pdisk_1.dat 2025-07-08T12:00:51.490529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:00:51.507027Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:51.539152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:51.539189Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:51.549880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:51.630903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:51.864513Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715658. Ctx: { TraceId: 01jzmyj13q0skbmaqmbjddmp1k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzJmZmRmN2ItMmY4Yjc3YTctZmE3ZmY2YzMtNzQ5YjdjMjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.874735Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715659. 
Ctx: { TraceId: 01jzmyj14tbz6tvpta1t3y8ezn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjJlZjJhOGMtNzk0NTM5ZDEtOTVlNWVhNGItYmE5ZDg2OGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.883435Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jzmyj153fcqcft73dewnfa99, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWUzMzAxMmMtZjAzMDllZTMtOGEyZjVkZDAtMTJkYjIwZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.890767Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jzmyj15c30ywjhyv570rfp5f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWExZTFiYzEtM2FjZTJiZjEtNzU3YzUwZmYtMzhkMjE3ZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.899936Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jzmyj15k06wtgdb7xe1hk263, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmQxM2RlZGYtNDRiNzg4OS0yMDA3NjljYS1lOTU1MGViYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.909336Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jzmyj15xc1pydj64rat53jq9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWY0NTM1N2MtMjUzN2ZiNmEtYTZiYzRhOC0xZTBhMWZmMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.918652Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jzmyj166e3fx17cxpgg6g5rx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmI5MjMxOWUtNTZiMGQyOTMtY2U5YmIyMTgtZmZlZDc0NDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.926737Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jzmyj16f8tn5wfsht2afw1sj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDUwMmUwZDItYmM3ZmJjMzAtMmJhODQ0NzQtMTlmZDNkNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.936818Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jzmyj16qcq6vkneyej1nxx7e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njg0NzZjMTMtYTI3MWJkYTctNmQwMGUzYmEtNzU5NzlkYTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.946703Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jzmyj1728dp342tg875bg35d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWYxNjljMjUtMTQ4YTY0NmMtODc2YjNlNWQtOGYyYWUwMGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.954649Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jzmyj17b12kswzv5wq31m92v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjFmYWVmNGEtNWIyMTM1YzMtZGFmNWYyNjYtNzg5OTFmYWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.963181Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jzmyj17kd3w5x2f79c964v6b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDJkNjkyMDctNjg5YzZlODUtOWRiMzU3NDEtYjQ2ZTk1NzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T12:00:51.972288Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jzmyj17w6d07y1qnzn5s7kvz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY2YjdlM2QtMmNkYzQ4YzQtYTZkNmU4MWMtZDhhYThhNDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.980731Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jzmyj18558gzhtfwn8kfjgey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY2ZTk2ZS0yN2Y0MmQyZS05YmExMTdiYS0yOGIxNGRlNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.989948Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jzmyj18d46ap5ssevmhge6pz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDhkNzdhMzItMmQ5Y2I2MzktY2U1YzlhNDgtZDE0MmMzYjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.999025Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jzmyj18pefqt2v2ayzckthp8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTQzZDk0MTctYjU2NjU2ZGUtZmM5MDNiZmUtOTNmOGJmZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.007965Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jzmyj190d9gbhz27srscx19e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Nzg0NjEzYWMtZjllNzFiNTQtMmZjNWU2ODQtNjExNWIyYzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.017560Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jzmyj198armkgzrmszkc4jfz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjQ5YmY0NmItNWI2NzQzZmYtNjJmMWNmOTktZTJlYjZkMWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.026820Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jzmyj19jfmcka38qf9w7qqgh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTdiZmY2NzgtZGQ3OWNlMmEtOWNmZmIxZTgtZGEyZjljYzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.035206Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jzmyj19vdrrmf22dgtkyn00c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODZkNWRkMzItNWI1NDczNTktZmU5YmFjYjQtYTg1MmQzZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.043710Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jzmyj1a4berq6heezv1thzp6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTNiZWMwYmUtMTE4NmIwOGUtMzZlZWQzZjctNjAwNjE1NmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.052230Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jzmyj1acb61q7r42n0q6w3ey, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2M5YmZjMzEtMWE1ZjIwOTktMmE1ZjE0YjktYWNkN2U1Y2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.059434Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. 
Ctx: { TraceId: 01jzmyj1anfkbq6ezcj1cxdbw5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTEzNDBjNjUtOTExMGE1NjYtMWUyZGY3ZGMtNmZmMWUyYTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.067691Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jzmyj1awfmcz1q3c4z4jadqs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTc3NzBjZGUtZTAyMjc3MGQtNzM2ODE3ZGItM2I0ZDU0ZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.076843Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jzmyj1b48ztkyatefab8b6vj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmJkMzQyN2QtZmE5YWFiOC1jY2ZlYTc4Ni04NjBlMTdiNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.085705Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jzmyj1bd908387f3c61kcfby, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmQyODA1MzctYTk0MzE4OTMtMmY0ODk3MTMtODBiYTJhNTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.094883Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jzmyj1bpe3zkr33he15fvd1p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzZhYzlmMTktNzY0ZjIwZWQtNjQ5MDI2MTYtYzBkNDk5YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.103593Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jzmyj1bzd6kjd4s16mkx4gfb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmUzYTA1ZGEtNTQxMDBhNGUtY2ZkZDEwYmQtMTY2NzkxNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.111955Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715686. Ctx: { TraceId: 01jzmyj1c8bmjn826ta44q8qjj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWIxY2I1ZDItMWJhMzUzODctOWRjZjBhZDUtYzkxM2EwYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.120152Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715687. Ctx: { TraceId: 01jzmyj1cge2fv29xr735pxy7d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDA2MDI1MDctM2RhMGFkNmUtOGRmZDZlODctMzhmOTE5NjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.128469Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715688. Ctx: { TraceId: 01jzmyj1crdes83mcn40nn8pt7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTU5NjVmLWE4NGM1MjA3LWU3OTFkNTZlLWM5NDZhYjU5, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.137703Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715689. Ctx: { TraceId: 01jzmyj1d145zxx1hav0xwdn35, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmI1OTNhODUtM2ZhM2Q1ODgtOWEzNmYyNzAtMjUzNzRlYTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.146384Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715690. Ctx: { TraceId: 01jzmyj1da6p5v5yvv2kmxnfqv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2I3NGUzN2QtZTA2Nzk4YzItZjlmZmE2NC04NGViNzgxNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database ... 
01jzmyj1py77ctk0ysj997w1nw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmYyOTc1YWUtZjA2MDQ2NS0yZTk1ZDllOC02NDlmNjY5OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.467717Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715723. Ctx: { TraceId: 01jzmyj1qa1qb3v2hat4pwr2qs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzQ2YTY3MzgtNjY4ODkwYjItY2JkOGUzMmItMTIwZGJmOGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.477276Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715724. Ctx: { TraceId: 01jzmyj1qm9jyajrxp7x5fje4g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTZmOWNiNTAtYWExMWY4ZTItYjgyMWIxNTctYjBiOTZjNzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.486539Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715725. Ctx: { TraceId: 01jzmyj1qy61wf2jtzgtyxw8gg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWQ4ZWM0ZDAtODkyYTc3MTgtMjY2YTI1MWQtZjZmZWM2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.496921Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jzmyj1r744kba4s0fyp298nd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjQzNzEyMTUtYTlhMmQ4Yi0yNDBlMjM4YS04MTU3ZTNlYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.507082Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jzmyj1rj17hb6pt3pdhj19n5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTc2NDU5YTUtZTkyNTc0NGYtZTZmYjBjYWMtNTQ5M2RmZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.517028Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jzmyj1rw11ersm48pn7nxthr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmIwOWIwMjItODUzMjhkMjAtOTgzY2QxYTUtYTJmOTZkNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.528231Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. Ctx: { TraceId: 01jzmyj1s67vnm6fyr6tfeh2m7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTg4YWNmNDMtZmY2MWFmNjctNGIyOWQxZWQtNWMzMDU1OTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.539159Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jzmyj1sh7610dxeews108jz0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njk5NTJmZWQtMmUyZjE1ZTctZDgxNjI4NGEtMTMwYTFkZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.550101Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jzmyj1swaykz8mprc5vd9fvx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTdiYjMwOWYtZTg0Y2U4YzUtMzY0MjJlMjItMzU3ZDJmOGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.559870Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jzmyj1t7afh4gc40tb3r5xxe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWJkYTE5MjgtNWFkZGRmYzgtYTBkY2E3OWEtMmRjYjk3MWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T12:00:52.569449Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jzmyj1thbdbj8n0fy1z6zv5y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDY0MGUzNTMtODFlYjljOGMtMTgyNDM2NDktMzg1ZWFiNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.580427Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jzmyj1tt15cywwjpsewytbtq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTRiY2UzZTYtMjVmYTNlNjQtYTIyMzBjMjgtNWViYWU5N2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.591455Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jzmyj1v566h6c77kfr68438e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmYwODBjMy0xZThjZmJkOC03NjUyZWNlNC1jZjZmZmQ3Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.601008Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jzmyj1vgcmmkshp8ft6vj5kj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWQyNmUyYzctM2E3MDFhYzQtNDMwZmU3MjUtODlhZGY2YjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.609293Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jzmyj1vte99epx9kzf8m2rrd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjYxNzI1ODAtZDRiMmEwMzQtN2ZiOWUyNjAtOTQ2M2ZiZDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.617356Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jzmyj1w2bxszrbgngk1mkwdv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTYzMjFkYmQtYWRhMWU4NDItN2JjNTIxNjktOWE2ZGQ0OGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.626073Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. Ctx: { TraceId: 01jzmyj1wa8e89fhrve15a2vpf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzI2YjcwOWEtNjhiMDMzZTEtZGRmZDZjYTctMjQ2NzE3OWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.636555Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jzmyj1wkc8zafkpyd3m96nme, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzdlNmQ1MjEtZmQ1MDJiZmUtNzY0OWFjMTUtNmE2ZWEzM2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.645544Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jzmyj1wx79ve9cjzfbjfbmvf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2U1NzIwZC0zOWJmMWVhNi0yYmVhZjVlNC1lMzA4NmE3OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.654916Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jzmyj1x6dyjwf3qanya5w6n4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDdmYWFiMWMtMjZlNzg4YjItOGVkNjNjZjctNDUwNDRkZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.662588Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. 
Ctx: { TraceId: 01jzmyj1xf7k09w8fg6jw8war6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njg5MTE0MTEtYjEyMDc1MGQtNWVlYjMwN2QtZGIwMTQzOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.672379Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jzmyj1xqe65w8m70dg2z208v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzkwMDRlMmYtZDU0YzMwMzctMjE5MjFkMTgtZDZmODhiMjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.681617Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jzmyj1y1amyep4fwaddvgmxh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODkxNmFjYzMtOGM5Y2FiZi01NjEyOGY0My01NzMwMjcwMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.689586Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jzmyj1ya2pq2s927pgejd05m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjQzOWE1NTctMTIwYmQzYjEtNDM4NGFhZGEtN2E1ODNlOWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.698568Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jzmyj1yjb0syyv08yyxz0h4j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWNlZGI5MjUtYTJmZmViZjUtOTZlNWEzMi01ZDliMTNmNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.707507Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jzmyj1yv0zxymcr0kfjz188x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZThmZmUxODgtYmNjOTY0MzUtZmQ1NzM4Ny1mYTQ3ZWI1NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.714368Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. Ctx: { TraceId: 01jzmyj1z42g3yfn51wyp7ptx3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWJlYmRmNzUtYTU3YjA5MzEtYjFjZmE3NmMtOTRiNzI3MjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.721902Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jzmyj1zb1kr5za69hz8dmy7a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzZlMDEyMjgtYjY3ZGQxNjUtODM5NWU4MTMtMWZkYWI5M2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.729761Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jzmyj1zj4w7tvs1xwh5p6qzg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWVjNjI2YmEtZDI2OWYxODYtNmY4ZGZjNzQtY2FhNzQ2NTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.737774Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jzmyj1zt3rmaenbwnfhjs6qa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzY3ZTg5ZDMtZWU2M2IyN2YtNzNhMzI2OTMtYmZlMDhhNDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.747084Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jzmyj20379p3672yc3jfgtaz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzBlYzE1YWEtNjRiYmFkZTctOGE4ZDk0MDYtNzBiNjNlNjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T12:00:52.755668Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jzmyj20b7n05by41g056f842, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWMwOTk3MzMtODE1NjJhMzgtM2QxYjEwOGQtM2QwMTRiMmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.765488Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jzmyj20mb4c3gqg90nz01bhx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTg3NTU4NDYtN2Y4M2Y3MDItNTljZjQ1YmEtNjJlZTVjZjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.775899Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jzmyj20yckzmmv5ep4qtnyqm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjE0M2ZmNjUtNGU1ZjU3ZGMtMjQ3NWJhNmEtYjMyMTk0ZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.785097Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jzmyj219exznwv1wscth1xgg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODgzNTBkZDUtNDBlZWY3Y2UtZDg4NTM1My1jOWRhNDU5ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.802738Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jzmyj21pczzcgnmxnntcvhnj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY1MDYxZWUtZjk1ZjQ2NzMtMmRkOWZjMWYtYjJlM2YwMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root >> TNodeBrokerTest::RegistrationPipelining [GOOD] |68.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> BasicUsage::PropagateSessionClosed >> BasicUsage::GetAllStartPartitionSessions ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx2 [GOOD] Test command err: iteration# 2 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 8 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 14 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 20 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 26 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 32 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 38 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 44 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 50 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 56 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 62 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 68 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 74 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 80 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 86 BlobsWritten# 2041 
blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 92 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 98 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 104 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 110 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 116 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 122 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 128 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 134 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 140 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 146 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 152 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 158 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 164 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 170 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 176 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 182 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 188 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 194 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 200 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 206 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 212 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 218 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 224 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 230 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 236 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 242 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 248 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 254 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 260 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 266 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 272 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 278 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 284 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 290 
BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 296 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 302 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 308 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 314 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 320 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 326 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 332 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 338 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 344 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 350 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 356 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 362 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 368 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 374 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 380 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 386 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 392 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 398 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 404 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 410 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 416 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 422 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 428 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 434 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 440 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 446 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 452 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 458 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 464 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 470 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 476 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 482 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 488 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 
iteration# 494 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 500 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 506 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 512 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 518 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 524 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 530 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 536 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 542 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 548 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 554 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 560 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 566 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 572 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 578 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 584 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 590 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 596 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 602 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 608 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 614 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 620 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 626 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 632 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 638 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 644 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 650 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 656 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 662 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 668 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 674 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 680 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 686 BlobsWritten# 2041 blobsWrittenFul ... 
blobsUnwritten# 1218 iteration# 1364 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1370 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1376 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1382 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1388 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1394 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1400 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1406 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1412 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1418 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1424 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1430 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1436 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1442 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1448 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1454 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1460 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1466 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1472 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1478 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1484 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1490 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1496 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1502 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1508 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1514 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1520 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1526 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1532 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1538 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1544 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1550 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1556 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1562 BlobsWritten# 2041 
blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1568 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1574 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1580 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1586 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1592 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1598 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1604 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1610 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1616 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1622 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1628 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1634 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1640 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1646 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1652 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1658 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1664 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1670 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1676 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1682 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1688 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1694 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1700 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1706 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1712 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1718 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1724 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1730 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1736 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1742 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1748 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1754 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1760 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 
blobsUnwritten# 1218 iteration# 1766 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1772 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1778 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1784 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1790 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1796 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1802 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1808 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1814 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1820 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1826 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1832 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1838 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1844 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1850 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1856 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1862 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1868 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1874 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1880 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1886 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1892 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1898 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1904 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1910 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1916 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1922 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1928 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1934 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1940 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1946 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1952 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1958 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1964 BlobsWritten# 2041 
blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1970 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1976 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1982 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1988 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1994 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2000 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2006 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2012 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2018 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2024 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2030 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2036 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameWithDifferentTenants [GOOD] Test command err: 2025-07-08T12:00:51.497942Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.510578Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.510636Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.510663Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.510687Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.510726Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.510757Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.510797Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.514663Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.514687Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.514722Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.514745Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.514758Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.514770Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 
2025-07-08T12:00:51.514784Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.521031Z node 1 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:51.521877Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.522244Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.522764Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.522833Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.522871Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.522902Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.522946Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.523221Z node 8 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:51.523276Z node 2 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:51.523420Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.523458Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.523480Z node 3 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:51.523556Z node 6 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:51.523668Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.523703Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.523773Z node 4 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:51.523959Z node 7 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:51.523998Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.524017Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 
2025-07-08T12:00:51.524035Z node 5 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:51.524067Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.524146Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.524180Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.524195Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.524244Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.524407Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.524480Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.524572Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.525026Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.525229Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.525513Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.529364Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.530199Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.530658Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.558949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:00:51.558970Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:51.563152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 2025-07-08T12:00:51.570455Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T12:00:51.571030Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T12:00:51.571100Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2025-07-08T12:00:51.571282Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete FAKE_COORDINATOR: Erasing txId 101 2025-07-08T12:00:51.572166Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2025-07-08T12:00:51.572190Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2025-07-08T12:00:51.572226Z node 1 
:NODE_BROKER DEBUG: [DB] Using default config. 2025-07-08T12:00:51.572238Z node 1 :NODE_BROKER DEBUG: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-07-08T12:00:51.572245Z node 1 :NODE_BROKER DEBUG: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-07-08T12:00:51.573510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 FAKE_COORDINATOR: Erasing txId 102 2025-07-08T12:00:51.608194Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2025-07-08T12:00:51.608233Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.026000Z 2025-07-08T12:00:51.608240Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2025-07-08T12:00:51.608305Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:628:2249], Recipient [1:560:2189]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:51.608681Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:625:2247], Recipient [1:560:2189]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-07-08T12:00:51.608696Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:51.608708Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-07-08T12:00:51.608809Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:641:2254], Recipient [1:560:2189]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:51.608848Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:625:2247], Recipient [1:560:2189]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database" } 2025-07-08T12:00:51.608853Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-07-08T12:00:51.608862Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database" 2025-07-08T12:00:51.608919Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:23:2070], request# { ErrorCount: 0 DatabaseName: Domai ... 
ySetResult: response# { Path: dc-1/yet-another-database TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 3] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-07-08T12:00:51.649741Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { } FixedNodeId: false Path: "/dc-1/yet-another-database": scope id# <72057594046678944:3>: serviced subdomain# 72057594046678944:3 2025-07-08T12:00:51.649755Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:674:2189], Recipient [1:560:2189]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-07-08T12:00:51.649759Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-07-08T12:00:51.649769Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2025-07-08T12:00:51.649773Z node 1 :NODE_BROKER DEBUG: Registration request from host1:19001 (not fixed) tenant: /dc-1/yet-another-database 2025-07-08T12:00:51.649794Z node 1 :NODE_BROKER DEBUG: [DB] Adding node #1024 host1:19001 to database resolvehost=host1 address= dc= location= lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:3 slotindex=1 authorizedbycertificate=false 2025-07-08T12:00:51.660790Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2025-07-08T12:00:51.660841Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { } Expire: 7200026000 Name: "slot-1" } 2025-07-08T12:00:51.660969Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:679:2276], Recipient [1:560:2189]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:51.661000Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:625:2247], Recipient [1:560:2189]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host4" Port: 19001 ResolveHost: "host4" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database" } 2025-07-08T12:00:51.661007Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-07-08T12:00:51.661015Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host4" Port: 19001 ResolveHost: "host4" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database" 2025-07-08T12:00:51.661059Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:23:2070], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/my-database TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-07-08T12:00:51.661084Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:23:2070], cacheItem# { Subscriber: { Subscriber: [1:643:2255] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 5000001 PathId: [OwnerId: 
72057594046678944, LocalPathId: 2] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/my-database TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-07-08T12:00:51.661127Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:681:2277], recipient# [1:680:2189], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-07-08T12:00:51.661142Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-07-08T12:00:51.661167Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host4" Port: 19001 ResolveHost: "host4" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2025-07-08T12:00:51.661183Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:680:2189], Recipient [1:560:2189]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-07-08T12:00:51.661187Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-07-08T12:00:51.661198Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2025-07-08T12:00:51.661202Z node 1 :NODE_BROKER DEBUG: Registration request from host4:19001 (not fixed) tenant: /dc-1/my-database 2025-07-08T12:00:51.661220Z node 1 :NODE_BROKER DEBUG: [DB] Adding node #1027 host4:19001 to database resolvehost=host4 address= dc= location= lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=0 authorizedbycertificate=false 2025-07-08T12:00:51.661251Z node 1 :NODE_BROKER DEBUG: [Dirty] Added node #1027 host4:19001 2025-07-08T12:00:51.661256Z node 1 :NODE_BROKER DEBUG: [DB] Update epoch version in database version=5 2025-07-08T12:00:51.661264Z node 1 :NODE_BROKER DEBUG: [Dirty] Update current epoch version from 4 to 5 2025-07-08T12:00:51.672133Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2025-07-08T12:00:51.672152Z node 1 :NODE_BROKER DEBUG: [Committed] Added node #1027 host4:19001 2025-07-08T12:00:51.672166Z node 1 :NODE_BROKER DEBUG: [Committed] Update current epoch version from 4 to 5 2025-07-08T12:00:51.672171Z node 1 :NODE_BROKER DEBUG: Add node #1027 host4:19001 to epoch cache 2025-07-08T12:00:51.672205Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply 
with: Status { Code: OK } Node { NodeId: 1027 Host: "host4" Port: 19001 ResolveHost: "host4" Address: "" Location { } Expire: 7200026000 Name: "slot-0" } 2025-07-08T12:00:51.672317Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:685:2281], Recipient [1:560:2189]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:51.672346Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:625:2247], Recipient [1:560:2189]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database" } 2025-07-08T12:00:51.672352Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-07-08T12:00:51.672360Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database" 2025-07-08T12:00:51.672401Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:23:2070], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/my-database TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-07-08T12:00:51.672422Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:23:2070], cacheItem# { Subscriber: { Subscriber: [1:643:2255] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 5000001 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/my-database TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-07-08T12:00:51.672474Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:687:2282], recipient# [1:686:2189], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-07-08T12:00:51.672490Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-07-08T12:00:51.672501Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: 
"host1" Port: 19001 ResolveHost: "host1" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2025-07-08T12:00:51.672513Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:686:2189], Recipient [1:560:2189]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-07-08T12:00:51.672517Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-07-08T12:00:51.672526Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2025-07-08T12:00:51.672530Z node 1 :NODE_BROKER DEBUG: Registration request from host1:19001 (not fixed) tenant: /dc-1/my-database 2025-07-08T12:00:51.672544Z node 1 :NODE_BROKER DEBUG: [DB] Adding node #1024 host1:19001 to database resolvehost=host1 address= dc= location= lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=2 authorizedbycertificate=false 2025-07-08T12:00:51.683553Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2025-07-08T12:00:51.683591Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { } Expire: 7200026000 Name: "slot-2" } >> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite [GOOD] >> DataShardSnapshots::RepeatableReadAfterSplitRace >> BasicUsage::FallbackToSingleDb >> BasicUsage::WaitEventBlocksBeforeDiscovery >> TNodeBrokerTest::MinDynamicNodeIdShifted [GOOD] >> DataShardSnapshots::LockedWritesLimitedPerKey-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit+UseSink >> TNodeBrokerTest::LoadStateMoveEpoch [GOOD] >> BasicUsage::SelectDatabaseByHash [GOOD] >> BasicUsage::SelectDatabase [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::RegistrationPipelining [GOOD] Test command err: 2025-07-08T12:00:52.241374Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.257037Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.257105Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.257133Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.257158Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.257212Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.257242Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.257276Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.269540Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.269606Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.269633Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 
18446744073709.551615s } 2025-07-08T12:00:52.269657Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.269682Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.269704Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.269725Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.272665Z node 1 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.276316Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.278060Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.278125Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.278164Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.278243Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.278306Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.278392Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.278569Z node 3 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.278670Z node 2 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.278765Z node 6 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.278885Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.278904Z node 4 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.278955Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.279004Z node 5 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.279027Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.279066Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.279200Z node 5 :NAMESERVICE DEBUG: 
Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.279254Z node 7 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.279436Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.279445Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.279461Z node 8 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.279557Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.279567Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.279589Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.279675Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.279783Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.280008Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.280330Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.308557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:00:52.308580Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:52.312943Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T12:00:52.313439Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T12:00:52.313573Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2025-07-08T12:00:52.313758Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T12:00:52.314619Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2025-07-08T12:00:52.314662Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2025-07-08T12:00:52.314696Z node 1 :NODE_BROKER DEBUG: [DB] Using default config. 
2025-07-08T12:00:52.314710Z node 1 :NODE_BROKER DEBUG: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-07-08T12:00:52.314716Z node 1 :NODE_BROKER DEBUG: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-07-08T12:00:52.346853Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2025-07-08T12:00:52.346892Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2025-07-08T12:00:52.346899Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2025-07-08T12:00:52.357225Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:588:2207], Recipient [1:552:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:52.357611Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:541:2180], Recipient [1:552:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-07-08T12:00:52.357623Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:52.357635Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-07-08T12:00:52.357733Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:590:2209], Recipient [1:552:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:52.357743Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:541:2180], Recipient [1:552:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-07-08T12:00:52.357745Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:52.357749Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-07-08T12:00:52.357798Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:592:2211], Recipient [1:552:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:52.357817Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:541:2180], Recipient [1:552:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-07-08T12:00:52.357820Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-07-08T12:00:52.357834Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } 2025-07-08T12:00:52.357869Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:594:2213], Recipient [1:552:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:52.357878Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:541:2180], Recipient [1:552:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1025 } 2025-07-08T12:00:52.357880Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-07-08T12:00:52.357884Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } 2025-07-08T12:00:52.357922Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:596:2215], Recipient [1:552:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 
2025-07-08T12:00:52.357942Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:541:2180], Recipient [1:552:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2025-07-08T12:00:52.357946Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-07-08T12:00:52.357952Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "hos ... ODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-07-08T12:00:52.364087Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-07-08T12:00:52.364095Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:608:2186], Recipient [1:552:2186]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-07-08T12:00:52.364097Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-07-08T12:00:52.364101Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2025-07-08T12:00:52.364103Z node 1 :NODE_BROKER DEBUG: Registration request from host2:1001 (not fixed) tenant: dc-1 2025-07-08T12:00:52.364110Z node 1 :NODE_BROKER DEBUG: [DB] Adding node #1025 host2:1001 to database resolvehost=host2.yandex.net address=1.2.3.4 dc= location= lease=1 expire=Thu, 01 Jan 1970 02:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=1 authorizedbycertificate=false 2025-07-08T12:00:52.364120Z node 1 :NODE_BROKER DEBUG: [Dirty] Added node #1025 host2:1001 2025-07-08T12:00:52.364123Z node 1 :NODE_BROKER DEBUG: [DB] Update epoch version in database version=3 2025-07-08T12:00:52.364127Z node 1 :NODE_BROKER DEBUG: [Dirty] Update current epoch version from 2 to 3 2025-07-08T12:00:52.364164Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:611:2222], Recipient [1:552:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:52.364178Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:541:2180], Recipient [1:552:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2025-07-08T12:00:52.364180Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-07-08T12:00:52.364185Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 
2025-07-08T12:00:52.364197Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:23:2070], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-07-08T12:00:52.364202Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:23:2070], cacheItem# { Subscriber: { Subscriber: [1:598:2216] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-07-08T12:00:52.364215Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:613:2223], recipient# [1:612:2186], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-07-08T12:00:52.364223Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-07-08T12:00:52.364229Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-07-08T12:00:52.364236Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:612:2186], Recipient [1:552:2186]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-07-08T12:00:52.364240Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-07-08T12:00:52.364242Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2025-07-08T12:00:52.364244Z node 1 :NODE_BROKER DEBUG: Registration request from host2:1001 (not fixed) tenant: dc-1 2025-07-08T12:00:52.364251Z node 1 :NODE_BROKER DEBUG: [DB] Update node #1025 host2:1001 location in database location=DC=1/M=2/R=3/U=4/ ... waiting for commit ... 
blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... waiting for commit (done) 2025-07-08T12:00:52.374601Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:615:2225], Recipient [1:552:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:52.374649Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:541:2180], Recipient [1:552:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-07-08T12:00:52.374656Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:52.374673Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-07-08T12:00:52.374752Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:617:2227], Recipient [1:552:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:52.374776Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:541:2180], Recipient [1:552:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-07-08T12:00:52.374783Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-07-08T12:00:52.374801Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR 2025-07-08T12:00:52.375542Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2025-07-08T12:00:52.375563Z node 1 :NODE_BROKER DEBUG: [Committed] Added node #1024 host1:1001 2025-07-08T12:00:52.375571Z node 1 :NODE_BROKER DEBUG: [Committed] Update current epoch version from 1 to 2 2025-07-08T12:00:52.375576Z node 1 :NODE_BROKER DEBUG: Add node #1024 host1:1001 to epoch cache 2025-07-08T12:00:52.375623Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-0" } 2025-07-08T12:00:52.375638Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2025-07-08T12:00:52.375642Z node 1 :NODE_BROKER DEBUG: [Committed] Added node #1025 host2:1001 2025-07-08T12:00:52.375646Z node 1 :NODE_BROKER DEBUG: [Committed] Update current epoch version from 2 to 3 2025-07-08T12:00:52.375649Z node 1 :NODE_BROKER DEBUG: Add node #1025 host2:1001 to epoch cache 2025-07-08T12:00:52.375661Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { } Expire: 7200025000 Name: "slot-1" } 2025-07-08T12:00:52.375667Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2025-07-08T12:00:52.375683Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-1" } 2025-07-08T12:00:52.375792Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:621:2231], Recipient [1:552:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:52.375813Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:541:2180], Recipient [1:552:2186]: 
NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-07-08T12:00:52.375821Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:52.375831Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-07-08T12:00:52.375892Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:623:2233], Recipient [1:552:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:52.375912Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:541:2180], Recipient [1:552:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-07-08T12:00:52.375916Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-07-08T12:00:52.375930Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-0" } } 2025-07-08T12:00:52.375985Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:625:2235], Recipient [1:552:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:52.376000Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:541:2180], Recipient [1:552:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1025 } 2025-07-08T12:00:52.376003Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-07-08T12:00:52.376016Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1025 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 7200025000 Name: "slot-1" } } >> BasicUsage::RetryDiscoveryWithCancel >> BasicUsage::WriteSessionCloseWaitsForWrites >> BasicUsage::BasicWriteSession ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::MinDynamicNodeIdShifted [GOOD] Test command err: 2025-07-08T12:00:51.546679Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.562061Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.562326Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.562363Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.562393Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.562452Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.562488Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.562529Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.569311Z node 7 :NAMESERVICE DEBUG: Handle 
NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.569343Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.569386Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.569419Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.569436Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.569451Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.569467Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.575642Z node 1 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:51.576489Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.576873Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.578959Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.579088Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.579117Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.579154Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.579223Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.579542Z node 8 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:51.579608Z node 2 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:51.579771Z node 3 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:51.579843Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.579885Z node 6 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:51.580022Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.580105Z node 4 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 
VersionInfo: } 2025-07-08T12:00:51.580232Z node 5 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:51.580347Z node 7 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:51.580401Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.580413Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.580485Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.580539Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.580691Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.580838Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.581257Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.582772Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.610557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:00:51.610579Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:51.615679Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T12:00:51.616101Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T12:00:51.616221Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2025-07-08T12:00:51.616405Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T12:00:51.617234Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2025-07-08T12:00:51.617280Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2025-07-08T12:00:51.617316Z node 1 :NODE_BROKER DEBUG: [DB] Using default config. 
2025-07-08T12:00:51.617329Z node 1 :NODE_BROKER DEBUG: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-07-08T12:00:51.617335Z node 1 :NODE_BROKER DEBUG: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-07-08T12:00:51.653418Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2025-07-08T12:00:51.653463Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2025-07-08T12:00:51.653469Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2025-07-08T12:00:51.663851Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:584:2207], Recipient [1:548:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:51.664222Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:537:2180], Recipient [1:548:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-07-08T12:00:51.664231Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:51.664245Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-07-08T12:00:51.664333Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:586:2209], Recipient [1:548:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:51.664363Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:537:2180], Recipient [1:548:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2025-07-08T12:00:51.664368Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-07-08T12:00:51.664378Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2025-07-08T12:00:51.664429Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:23:2070], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-07-08T12:00:51.664443Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:23:2070], path# /dc-1, domainOwnerId# 72057594046678944 2025-07-08T12:00:51.671114Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:23:2070], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 
SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046678944 } 2025-07-08T12:00:51.671210Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:23:2070], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoCh ... der [1:680:2259], Recipient [1:605:2216]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:51.850485Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [3:81:2072], Recipient [1:676:2255] 2025-07-08T12:00:51.850488Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:51.850493Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-07-08T12:00:51.850504Z node 7 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:680:2259] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2025-07-08T12:00:51.850514Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [5:137:2072], Recipient [1:677:2256] 2025-07-08T12:00:51.850516Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:51.850521Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-07-08T12:00:51.850558Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [6:165:2072], Recipient [1:678:2257] 2025-07-08T12:00:51.850561Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:51.850566Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-07-08T12:00:51.850598Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [4:109:2072], Recipient [1:679:2258] 2025-07-08T12:00:51.850602Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:51.850607Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-07-08T12:00:51.850620Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [7:193:2072], Recipient [1:680:2259] 2025-07-08T12:00:51.850623Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:51.850628Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.3 1970-01-01T00:00:00.025000Z - 
1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-07-08T12:00:52.006029Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435072, Sender [1:605:2216], Recipient [1:605:2216]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2025-07-08T12:00:52.006066Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2025-07-08T12:00:52.006081Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Execute 2025-07-08T12:00:52.006092Z node 1 :NODE_BROKER DEBUG: [DB] Update epoch in database: #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-07-08T12:00:52.006120Z node 1 :NODE_BROKER DEBUG: [Dirty] Move to new epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-07-08T12:00:52.310474Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:605:2216]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 2 } 2025-07-08T12:00:52.310498Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:52.310506Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2025-07-08T12:00:52.310607Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [8:221:2072], Recipient [1:674:2253] 2025-07-08T12:00:52.310613Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:52.310618Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2025-07-08T12:00:52.310653Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [3:81:2072], Recipient [1:676:2255] 2025-07-08T12:00:52.310656Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:52.310660Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2025-07-08T12:00:52.310670Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [5:137:2072], Recipient [1:677:2256] 2025-07-08T12:00:52.310673Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:52.310676Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2025-07-08T12:00:52.310682Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [6:165:2072], Recipient [1:678:2257] 2025-07-08T12:00:52.310685Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:52.310689Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2025-07-08T12:00:52.310695Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [4:109:2072], Recipient [1:679:2258] 2025-07-08T12:00:52.310697Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:52.310700Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2025-07-08T12:00:52.310706Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [7:193:2072], Recipient [1:680:2259] 2025-07-08T12:00:52.310709Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:52.310712Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2025-07-08T12:00:52.310719Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [2:53:2072], Recipient [1:675:2254] 2025-07-08T12:00:52.310722Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:52.310725Z node 1 
:NODE_BROKER DEBUG: Delaying list nodes request for epoch #2 2025-07-08T12:00:52.321445Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2025-07-08T12:00:52.321473Z node 1 :NODE_BROKER DEBUG: [Committed] Move to new epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-07-08T12:00:52.321490Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T02:00:00.025000Z 2025-07-08T12:00:52.321496Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #2 nodes=2 expired=0 2025-07-08T12:00:52.321531Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-07-08T12:00:52.321541Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-07-08T12:00:52.321550Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-07-08T12:00:52.321558Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-07-08T12:00:52.321564Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-07-08T12:00:52.321575Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-07-08T12:00:52.321582Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-07-08T12:00:52.321589Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-07-08T12:00:52.426604Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:696:2266], Recipient [1:605:2216]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:52.426661Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:537:2180], Recipient [1:605:2216]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-07-08T12:00:52.426669Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:52.426683Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-07-08T12:00:52.426756Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:698:2268], Recipient [1:605:2216]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:52.426767Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:537:2180], Recipient [1:605:2216]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-07-08T12:00:52.426770Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:52.426776Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #2.4 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z 2025-07-08T12:00:52.426835Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:700:2270], Recipient [1:605:2216]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:52.426855Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039939, Sender 
[1:537:2180], Recipient [1:605:2216]: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseRequest { NodeId: 1024 } 2025-07-08T12:00:52.426861Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvExtendLeaseRequest 2025-07-08T12:00:52.426881Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Execute node #1024 2025-07-08T12:00:52.426890Z node 1 :NODE_BROKER DEBUG: [DB] Update node #1024 host1:1001 lease in database lease=2 expire=1970-01-01T03:00:00.025000Z 2025-07-08T12:00:52.426932Z node 1 :NODE_BROKER DEBUG: [Dirty] Extended lease of #1024 host1:1001 up to Thu, 01 Jan 1970 03:00:00 UTC (lease 2) 2025-07-08T12:00:52.441342Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Complete 2025-07-08T12:00:52.441409Z node 1 :NODE_BROKER TRACE: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: OK } NodeId: 1024 Expire: 10800025000 Epoch { Id: 2 Version: 4 Start: 3600025000 End: 7200025000 NextEnd: 10800025000 } } 2025-07-08T12:00:52.441432Z node 1 :NODE_BROKER DEBUG: [Committed] Extended lease of #1024 host1:1001 up to Thu, 01 Jan 1970 03:00:00 UTC (lease 2) 2025-07-08T12:00:52.441568Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:704:2274], Recipient [1:605:2216]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:52.441600Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039939, Sender [1:537:2180], Recipient [1:605:2216]: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseRequest { NodeId: 1026 } 2025-07-08T12:00:52.441607Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvExtendLeaseRequest 2025-07-08T12:00:52.441621Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Execute node #1026 2025-07-08T12:00:52.441629Z node 1 :NODE_BROKER DEBUG: [DB] Update node #1026 host2:1001 lease in database lease=2 expire=1970-01-01T03:00:00.025000Z 2025-07-08T12:00:52.441679Z node 1 :NODE_BROKER DEBUG: [Dirty] Extended lease of #1026 host2:1001 up to Thu, 01 Jan 1970 03:00:00 UTC (lease 2) 2025-07-08T12:00:52.454125Z node 1 :NODE_BROKER DEBUG: TTxExtendLease Complete 2025-07-08T12:00:52.454258Z node 1 :NODE_BROKER TRACE: TTxExtendLease reply with: NKikimr::NNodeBroker::TEvNodeBroker::TEvExtendLeaseResponse { Status { Code: OK } NodeId: 1026 Expire: 10800025000 Epoch { Id: 2 Version: 4 Start: 3600025000 End: 7200025000 NextEnd: 10800025000 } } 2025-07-08T12:00:52.454420Z node 1 :NODE_BROKER DEBUG: [Committed] Extended lease of #1026 host2:1001 up to Thu, 01 Jan 1970 03:00:00 UTC (lease 2) >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart+UseSink |68.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::SelectDatabase [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::LoadStateMoveEpoch [GOOD] Test command err: 2025-07-08T12:00:51.511888Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.525711Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.525767Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.525794Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 
2025-07-08T12:00:51.525815Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.525851Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.525880Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.525914Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.529492Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.529516Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.529530Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.529544Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.529555Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.529569Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.529665Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.534796Z node 1 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:51.536880Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.537900Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.537960Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.537997Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.538029Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.538066Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.538113Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.538427Z node 5 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:51.538473Z node 6 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:51.538646Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.538662Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 
18446744073709.551615s } 2025-07-08T12:00:51.538677Z node 8 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:51.538721Z node 2 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:51.538744Z node 4 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:51.538864Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.538907Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.538921Z node 3 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:51.538942Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.539065Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.539106Z node 7 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:51.539218Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.539240Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.539310Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.539419Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.545596Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.545984Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.547890Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.548550Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.548579Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.548677Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.548939Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.549291Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.549882Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 
18446744073709.551615s } 2025-07-08T12:00:51.550230Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.551044Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.551240Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:51.576763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:00:51.576782Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:51.580679Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T12:00:51.581089Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T12:00:51.581192Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2025-07-08T12:00:51.581343Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T12:00:51.581950Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2025-07-08T12:00:51.582076Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2025-07-08T12:00:51.582114Z node 1 :NODE_BROKER DEBUG: [DB] Using default config. 2025-07-08T12:00:51.582126Z node 1 :NODE_BROKER DEBUG: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-07-08T12:00:51.582132Z node 1 :NODE_BROKER DEBUG: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-07-08T12:00:51.614990Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2025-07-08T12:00:51.615018Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2025-07-08T12:00:51.615023Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2025-07-08T12:00:51.625926Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:596:2207], Recipient [1:560:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:51.626304Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:549:2180], Recipient [1:560:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2025-07-08T12:00:51.626312Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-07-08T12:00:51.626324Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2025-07-08T12:00:51.626368Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:23:2070], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-07-08T12:00:51.626380Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:23:2070], path# /dc-1, domainOwnerId# 72057594046678944 
2025-07-08T12:00:51.636406Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:23:2070], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Deprica ... 60418Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.4 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-07-08T12:00:52.160434Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [5:137:2072], Recipient [1:684:2252] 2025-07-08T12:00:52.160436Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:52.160439Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.4 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-07-08T12:00:52.160455Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [6:165:2072], Recipient [1:685:2253] 2025-07-08T12:00:52.160457Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:52.160460Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.4 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-07-08T12:00:52.160466Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [8:221:2072], Recipient [1:687:2255] 2025-07-08T12:00:52.160468Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:52.160472Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.4 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-07-08T12:00:52.160491Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [7:193:2072], Recipient [1:686:2254] 2025-07-08T12:00:52.160493Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:52.160496Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.4 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-07-08T12:00:52.160593Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:694:2262], Recipient [1:621:2220]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:52.160606Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:549:2180], Recipient [1:621:2220]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-07-08T12:00:52.160608Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:52.160611Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.4 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-07-08T12:00:52.160648Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:696:2264], Recipient [1:621:2220]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:52.160656Z node 1 :NODE_BROKER TRACE: StateWork, received 
event# 272039936, Sender [1:549:2180], Recipient [1:621:2220]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-07-08T12:00:52.160658Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:52.160662Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.4 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-07-08T12:00:52.160696Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:698:2266], Recipient [1:621:2220]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:52.160708Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:549:2180], Recipient [1:621:2220]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-07-08T12:00:52.160711Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-07-08T12:00:52.160724Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } 2025-07-08T12:00:52.160810Z node 1 :NODE_BROKER TRACE: StateWork, received event# 268829696, Sender [1:619:2219], Recipient [1:621:2220]: NKikimr::TEvTablet::TEvTabletDead 2025-07-08T12:00:52.160831Z node 1 :NODE_BROKER INFO: OnTabletDead: 72057594037936129 2025-07-08T12:00:52.160835Z node 1 :NODE_BROKER DEBUG: TNodeBroker::Cleanup 2025-07-08T12:00:52.160983Z node 1 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037936129 ClientId: [1:668:2072] ServerId: [1:680:2248] } 2025-07-08T12:00:52.161075Z node 6 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037936129 ClientId: [6:665:2072] ServerId: [1:685:2253] } 2025-07-08T12:00:52.161085Z node 7 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037936129 ClientId: [7:666:2072] ServerId: [1:686:2254] } 2025-07-08T12:00:52.161093Z node 8 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037936129 ClientId: [8:667:2072] ServerId: [1:687:2255] } 2025-07-08T12:00:52.161105Z node 2 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037936129 ClientId: [2:661:2072] ServerId: [1:681:2249] } 2025-07-08T12:00:52.161114Z node 3 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037936129 ClientId: [3:662:2072] ServerId: [1:682:2250] } 2025-07-08T12:00:52.161121Z node 4 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037936129 ClientId: [4:663:2072] ServerId: [1:683:2251] } 2025-07-08T12:00:52.161128Z node 5 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037936129 ClientId: [5:664:2072] ServerId: [1:684:2252] } 2025-07-08T12:00:52.162658Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T12:00:52.163627Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T12:00:52.163722Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2025-07-08T12:00:52.163757Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2025-07-08T12:00:52.163811Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T12:00:52.163837Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2025-07-08T12:00:52.163875Z node 1 :NODE_BROKER 
DEBUG: [DB] Using default config. 2025-07-08T12:00:52.163885Z node 1 :NODE_BROKER DEBUG: [DB] Loaded current epoch: #3.4 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-07-08T12:00:52.163908Z node 1 :NODE_BROKER DEBUG: [Dirty] Added expired node #1024 host1:1001 2025-07-08T12:00:52.163922Z node 1 :NODE_BROKER DEBUG: [DB] Loaded node #1024 host1:1001 expiring Thu, 01 Jan 1970 02:00:00 UTC 2025-07-08T12:00:52.163929Z node 1 :NODE_BROKER DEBUG: [DB] Removing node #1024 from database 2025-07-08T12:00:52.163937Z node 1 :NODE_BROKER DEBUG: [DB] Update epoch in database: #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-07-08T12:00:52.163947Z node 1 :NODE_BROKER DEBUG: [Dirty] Remove node #1024 host1:1001 2025-07-08T12:00:52.163951Z node 1 :NODE_BROKER DEBUG: [Dirty] Move to new epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-07-08T12:00:52.506572Z node 7 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.506614Z node 8 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.506661Z node 3 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.506692Z node 4 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.506704Z node 5 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.506792Z node 6 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.506813Z node 2 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.519273Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2025-07-08T12:00:52.519365Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T04:00:00.025000Z 2025-07-08T12:00:52.519373Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #4 nodes=0 expired=0 2025-07-08T12:00:52.519764Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:763:2300], Recipient [1:706:2269]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:52.519797Z node 1 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: OK ServerId: [1:763:2300] Leader: 1 Dead: 0 Generation: 4 VersionInfo: } 2025-07-08T12:00:52.519833Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:706:2269]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 4 } 2025-07-08T12:00:52.519839Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:52.519852Z node 1 :NODE_BROKER TRACE: Send 
TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-07-08T12:00:52.519969Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:775:2305], Recipient [1:706:2269]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:52.519989Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:549:2180], Recipient [1:706:2269]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-07-08T12:00:52.519994Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:52.520000Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-07-08T12:00:52.520049Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:777:2307], Recipient [1:706:2269]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:52.520064Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:549:2180], Recipient [1:706:2269]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-07-08T12:00:52.520068Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:52.520073Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-07-08T12:00:52.520128Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:779:2309], Recipient [1:706:2269]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:52.520146Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:549:2180], Recipient [1:706:2269]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-07-08T12:00:52.520151Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-07-08T12:00:52.520169Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } >> ExternalBlobsMultipleChannels::WithCompaction [GOOD] >> AsyncIndexChangeCollector::UpsertSingleRow >> TPersQueueCommonTest::TestWriteWithRateLimiterWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit >> TPersqueueControlPlaneTestSuite::SetupReadLockSessionWithDatabase >> TPersqueueControlPlaneTestSuite::TestAddRemoveReadRule ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::WithCompaction [GOOD] Test command err: 2025-07-08T12:00:51.233277Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00195a/r3tmp/tmpTworrY/pdisk_1.dat 2025-07-08T12:00:51.372414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:00:51.390087Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:51.424034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:51.424077Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-07-08T12:00:51.434690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:51.509842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:51.764278Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715658. Ctx: { TraceId: 01jzmyj10f186kds1tytngwsph, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzVkZTQ3ZTQtYjk5ZjE1Y2ItNmQ0MmE1MTgtZDA2ZDc2ZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.803209Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715659. Ctx: { TraceId: 01jzmyj12ccw0fa1c1f01x04ax, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE1OWQyYzgtNjk0N2U0MWQtZDU2Yjg0NDAtNGNkNTYzNTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.834470Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jzmyj13h3zkxq93m10cw5xjs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjZjYjJiZGYtYjM4MzQ4ZDctZjM0ZDc2ZDAtOTBmNmE4NGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.853103Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jzmyj14551f65vrcyp907jyw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWJhMjU0Y2ItNmNiZDgwNTItNzEwZGQ0MGEtMTcyNmQzNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.873876Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jzmyj14re5fh4gzysd89g2ff, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODRjOTQxZjMtZDU2OTdjN2ItZDFkYTNmZGQtNjUzYjQwMTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.897371Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jzmyj15dbq3jtrdf2x3ra9ze, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDQ2YTU0YzEtYzYyMDA1Y2EtZmE5MDA3YmEtYjdlOTFiMGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.916719Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jzmyj16513edk6xa2hwjzrb5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjczMjdmOWUtZDNiMGM2ZTUtOWM2OTBlYmMtYTlmYjRiYTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.937372Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jzmyj16r1t2zxkc4yvyh2m4y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjI0ZjhhYTctMzBiZGUwODMtZDZhYWU5MmYtNmYwZmZmZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.958726Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jzmyj17c8d2khe58mh1mek7k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzczZjI3NDktNDk4NTRiY2QtN2IyZDhiOGQtNDYxYzA1NTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:51.980731Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. 
Ctx: { TraceId: 01jzmyj18454qznr8y1t5d887n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjRiYzgzYzQtMTM2OTcxODYtODI4MzdlZTktYmEwNTA2ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.001972Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jzmyj18s84evbptwa8ekzzd6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDFjYTA2YjYtY2RjMTZhMTItYzI5MzA0ZWMtYzQ2M2M2MTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.022437Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jzmyj19e2e1tyjffw5r29xsf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmFlODE0Y2YtZjM5ZjNlMzMtZjhlYTNjMS1hMmMxZGFkNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.042674Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jzmyj1a103qphzfpcmy27yh4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2NjOTBmNjQtMmM1MDhjZjItZTAyYmQwMjctYzRhMTk4Yjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.062854Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jzmyj1ap7j00n4m2j50c5kw7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTI0YmFjMmQtYjFiMTMwMDgtZjhmMTNkYS0yZGM3ZTczMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.087076Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jzmyj1ba1ythdc8t4vfw3hpb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDVkYzg3NDEtNDUzNTkxNWEtNDEzMmQzMjUtZDFlNjk5NjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.111750Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jzmyj1c53r431kvm07yt1ft8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjgyOWRmZDctOTNiODY3OTQtM2NiZGYzZjMtZjA0Y2YzMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.140294Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jzmyj1cvfjazmmr8hzajvw18, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Nzg2OTgyM2YtN2EyYTg1ZjQtNWJjM2IwZmQtZGExYjkwYmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.157470Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jzmyj1dq6z7sa8thjaz9cy5z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTA5MGUxMjAtOWEyYjU4MGQtZDdjMDg2YWItMmE4NWMwNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.176479Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jzmyj1e8bf2f6jh4k8t2qb5t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDIwMDEwOGEtNmNlMjYwZWQtNDA4ZGM2ZmQtODNkYWM5NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.196371Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jzmyj1ewdfy4n80tjtgcx1ep, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTQzM2IwNmMtZWY5NjQ5YTgtNjdkYTk1OTEtNTJhNzlkNTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T12:00:52.218219Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jzmyj1fj0qess06r27vnggew, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTg0ODViODgtNTM1ZmVhZGEtYmU1YzE2MmMtNTZjM2Q3YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.238144Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jzmyj1g5bk6jgm9ean6p4scc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTg0N2U2OGEtYzE4NWMyZGUtOTZkMTg4YjQtOWQzYzllZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.259757Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jzmyj1gt1wk5mesx74pgmqaa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzU3YzU0NTUtMjdlZDEzOWMtZWU5ZDlmN2QtZjkyZWZmY2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.281120Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jzmyj1hf4184ah8e7dxfp66z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzhiMmZmMWUtNTBiNzk3YjItMjMzZGNhN2UtOTJjMjdhZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.301991Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jzmyj1j43pswbj6ktdjtravb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGJiYjMxYTktMWM3ZmI0NTktYmYwYmJlMGUtNGY1YWQxNzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.321698Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jzmyj1jsfq049rkyjs1md8dk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTI5NWY1ZGEtYmYyM2UxZWMtOThiOWIyZjAtNDFjMGE0Mjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.341325Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jzmyj1kd7vrq8mjt8wd99sjh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzQ4ZmVkZWQtNzNkMTEwYjItMTdjYjViMmUtNWY3ZmRkMzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.360813Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jzmyj1m0bbmvsct4rsw4143e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmI4ODIzYTQtZmQzYjJjYTYtNzY1ZjdiYzYtZjVjMjU3Mzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.381198Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715686. Ctx: { TraceId: 01jzmyj1mmbafxhskzmpy5bphv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWQ3M2FkNDItZThiYzdkZWYtODcwZGUwMTUtZmE0N2I2ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.401941Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715687. Ctx: { TraceId: 01jzmyj1n85q81axvdysqw0a7n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2EyNWIyNzAtMjc3NzEwNGUtMWJmYzE5NGItYWFkMjdmOTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.422427Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715688. 
Ctx: { TraceId: 01jzmyj1nx91pa6eatjkxmj6jv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTAwYTJjMWMtMzAyMGExYjktODkxZDM2YTctYjkwNTdmNjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.441269Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715689. Ctx: { TraceId: 01jzmyj1phfgd0hbsncxw4cq7n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWY1NTU2YmQtNWEzMjhkZjgtNmUxYzQ3Y2QtYTgyYWY1OWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:52.462959Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715690. Ctx: { TraceId: 01jzmyj1q54qw955t5ad58z30r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODE3NTI2NDgtMjkzMjhkOTUtYTA5ZDE2NWItYjYzNmQ0YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Datab ... base: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2RiMGQ5YTktZDQwNThmYTAtYjE5ODQwMmEtNTU0YjMyMTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:53.244459Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715724. Ctx: { TraceId: 01jzmyj2fjc38yk4352hhgkfmb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGYzYzE0NjMtZjFiMTMxMi02ZmY5ODg1LTQ5ZWU5MTY0, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:53.268905Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715725. Ctx: { TraceId: 01jzmyj2g9epq6pvp2r54yp7bb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTdjYTcwN2YtNjAwMTg2MTQtZTExNzFkODctMWQxZTJmODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:53.301822Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jzmyj2h0fk4f5bpth03ppj7c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWZhNTJlNDUtYjY0ODJkZDgtYTM0MWNhNTYtZmE2YjYzYWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:53.323199Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jzmyj2j1ahzmzgv9mexnbejt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGY0NDk2MC1jYzg0MDVhLTMzOGVjZmUwLWRjYTg1ZWZm, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:53.344114Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jzmyj2jq9zbz9k6zk38vybzv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBkMTZkLTRlYTM3ODM5LTEyMGRlNzNkLWNlYWM4MmQ5, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:53.365609Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. Ctx: { TraceId: 01jzmyj2kb0p5q38q2yfyxketz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWM5NjRiNzItZDUyNmE4MTktNTdkZjg1OTgtNzNiODNmODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:53.386461Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jzmyj2m16ww7tj3n507f674y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjI5ODVlNDItZWE4ZjVjYzEtMjQ3ZjJmM2MtYTllZmRlYjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:53.405729Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. 
Ctx: { TraceId: 01jzmyj2mp5x334gtdarwrka0j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjcyMzJmOGItZWQ2YWU5NDItMThlNjUwZGYtMTM5MDQyNTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:53.427600Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jzmyj2na0yagbscmjywq9k72, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTIzODU4OTYtZjg4M2U5ZTYtZjljYWMzNTEtMzRkYWNhM2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:53.449904Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jzmyj2nz9dah136feq2gamr0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODk3MTZjYzQtYzNlZDFmODUtNzk1NzE1ZTEtYjdkYjI2YTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:53.471750Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jzmyj2pn4k5mket79gdtqmnw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmIyNDkxODctNGZmOTgzNC0xMDM1OWMzOC1lZjUwNDQ3OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:53.492861Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jzmyj2qb64tj365tze4fxnzw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjU2NjI5NTQtYzk1ODA5OWYtNjRjZDBlYmEtM2IxNWM3NjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:53.512644Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jzmyj2r06bw34x8avvs3jfdv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2E1ZWViYjctYmM3OTUzMWUtZmMwODhhY2UtNTAxODMyZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:53.533747Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jzmyj2rm7xmghvyrf9qky2z8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjIzNGE1NDItOTVhZTkxNTAtNzgwZDdhMzEtNjQxMDJiNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:53.553736Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jzmyj2s91w5njt2z6nt9vr92, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTZkMDI4MWMtNjAxZTQxNGUtYzE5MmU2ZTYtMzdiOTZhODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:53.575755Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. Ctx: { TraceId: 01jzmyj2sxd2yj9mty36prxhag, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODAxODJhYWMtOGQ4YjA0YTUtNDdiYTkxMTQtOWRlMmE5NTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:53.598146Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jzmyj2tkdnt7j2dp5yn4c0g6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmE3YzVmZGQtMThhOGJhZWYtNjEwOTVhNWEtNGQwYzBlNTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:53.623120Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jzmyj2va6jx9vvgrgt6wp5a4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjEyZWYzOTItZGM2NWFjZi1hMThiNzRkMy1kMTQwNTdmNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T12:00:53.647547Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jzmyj2w2dc37dyxnag6h8apj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTExNzY3N2YtYTZkZjhlNS1lODc1MTY0NC05ZGUxYzg1Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:53.668704Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01jzmyj2wv63jyb7havyez7q9e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTBmNGI0ZjUtNzk0ODNiNWMtZTc0OGE1YzMtNTIzMzVjZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:53.689496Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jzmyj2xg1yc4d82ya0jpht1h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Nzg1NzEwZmEtOGJiMzE5ZDUtNjE0ZDNlMjAtYjE0MTY1Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:53.710243Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jzmyj2y5210q8ntjc65ht5vt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Nzg0ZjljZjktOGVhMGRmOGYtOTU4ZDhkYWYtZWFiNDk2OTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:53.733164Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jzmyj2yt0qyy7khks3wsz1rj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGYyODE3MWUtOTY0NzkxOTYtNzc2NjllZGQtZDliZWI1NWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:53.755150Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jzmyj2zhe3wjvhy95kgp3w0z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTEwMzY4NmItZTE3OTY1MzMtYTY5NzhhZS05ZDVhMjRjYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:53.779124Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jzmyj3082v18jcjq65n5d9xw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTMxNzI0N2ItMTRkNTFkN2MtNmMyMmU2ZjEtMjBlYmUxNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:53.806226Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. Ctx: { TraceId: 01jzmyj30z4y5was0w5axm8b69, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjJmYTMwMjItY2IxODllMzAtZTI3ZWQ4N2YtNTY1NDg2NzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:53.833059Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jzmyj31t8b100t4ksbw15e7s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGU0MzU1NDUtZWVjNGQwM2MtNDUwNDI1YWQtNjYwNzNjNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:53.854587Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jzmyj32nf8f7x2nj4xbc12hm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWIzNThiNGEtYmI0NzljZjYtNDcxZDJiZjQtMmEyM2MxMzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:53.877326Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. 
Ctx: { TraceId: 01jzmyj33a9685ejh9950cfa11, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTY0Y2IwYmMtMzY3ZWQzNTQtODg2OTE4YjYtNDFiMDEzZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:53.898586Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jzmyj341ehk1pej9ym5nz1fv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTZlNWVmZDktYWUzNTMyZTAtODcwOGY1N2UtOGI4NTQxNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:53.919679Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jzmyj34pd3xjgkd9r2jpc4pq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODRhOGI5NWQtYjRhNmQxZDktZDNmZDZmMzctODg1Mjk4NjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:53.940767Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jzmyj35b018p5p58ghh5yznz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmJiMThiOTMtNjNjYTdkMzItNjdiNWY5MTktMTFlN2E4ZGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:53.961988Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jzmyj3604bssr1s75j300ve8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjU3OWE4MDAtMTllZGZjZWYtYTFhMzA3NmYtNTU2NmU3OTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:53.985824Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jzmyj36r80vpb0nn2c66azgq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzNjZDhiY2MtNTY5MDc5YTEtYTNiY2U3ZGQtYjcyNTU2NjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:54.000347Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:00:54.009431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480 2025-07-08T12:00:54.226150Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jzmyj3e35jeh6b49yhhdkt5y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDA1NmUyY2MtYjg5Njc3NmItYWY4MGY4ZmYtMjVjMzdhYzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest >> TNodeBrokerTest::UpdateEpochPipelining [GOOD] >> DataShardSnapshots::DelayedWriteReadableAfterSplit [GOOD] >> DataShardSnapshots::DelayedWriteReplyAfterSplit >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit >> TNodeBrokerTest::NodeNameExpiration [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink >> TNodeBrokerTest::BasicFunctionality [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::UpdateEpochPipelining [GOOD] Test command err: 2025-07-08T12:00:52.269331Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.281076Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.281129Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.281152Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.281185Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.281222Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.281256Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.281301Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.285573Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.285594Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.285618Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.285639Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.285649Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.285658Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.285667Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.290487Z node 1 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.291395Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.291773Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.292276Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 
Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.292355Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.292374Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.292402Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.292440Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.292691Z node 8 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.292742Z node 2 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.292862Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.292895Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.292912Z node 3 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.293013Z node 6 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.293101Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.293130Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.293198Z node 4 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.293308Z node 5 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.293342Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.293370Z node 7 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.293466Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.293486Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.293537Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.293641Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.293699Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 
18446744073709.551615s } 2025-07-08T12:00:52.293789Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.294002Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.295404Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.295905Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.297080Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.297260Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.321461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:00:52.321483Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:52.326534Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T12:00:52.327049Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T12:00:52.327155Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2025-07-08T12:00:52.327348Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T12:00:52.328291Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2025-07-08T12:00:52.328323Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2025-07-08T12:00:52.328363Z node 1 :NODE_BROKER DEBUG: [DB] Using default config. 2025-07-08T12:00:52.328375Z node 1 :NODE_BROKER DEBUG: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-07-08T12:00:52.328381Z node 1 :NODE_BROKER DEBUG: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-07-08T12:00:52.360764Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2025-07-08T12:00:52.360804Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2025-07-08T12:00:52.360810Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2025-07-08T12:00:52.371167Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:590:2207], Recipient [1:554:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:52.371573Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:543:2180], Recipient [1:554:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2025-07-08T12:00:52.371584Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-07-08T12:00:52.371595Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2025-07-08T12:00:52.371658Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:23:2070], request# { ErrorCount: 0 
DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-07-08T12:00:52.371673Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:23:2070], path# /dc-1, domainOwnerId# 72057594046678944 2025-07-08T12:00:52.378655Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:23:2070], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046678944 } 2025-07-08T12:00:52.378759Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:23:2070], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusS ... 
: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046678944 } 2025-07-08T12:00:53.743265Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:23:2070], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046678944 }, by path# { Subscriber: { Subscriber: [1:722:2267] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-07-08T12:00:53.743297Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:23:2070], cacheItem# { Subscriber: { Subscriber: [1:722:2267] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DomainId: [OwnerId: 72057594046678944, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-07-08T12:00:53.743355Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:729:2268], recipient# [1:721:2186], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 
TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-07-08T12:00:53.743369Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1 TableId: [72057594046678944:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 1] Params { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-07-08T12:00:53.743384Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1": scope id# <72057594046678944:1>: serviced subdomain# 72057594046678944:1 2025-07-08T12:00:53.743400Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:721:2186], Recipient [1:554:2186]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-07-08T12:00:53.743404Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-07-08T12:00:53.743422Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Execute 2025-07-08T12:00:53.743426Z node 1 :NODE_BROKER DEBUG: Registration request from host2:1001 (not fixed) tenant: dc-1 2025-07-08T12:00:53.743453Z node 1 :NODE_BROKER DEBUG: [DB] Adding node #1024 host2:1001 to database resolvehost=host2.yandex.net address=1.2.3.4 dc=1 location=DC=1/M=2/R=3/U=4/ lease=1 expire=Thu, 01 Jan 1970 05:00:00 UTC servicedsubdomain=72057594046678944:1 slotindex=0 authorizedbycertificate=false 2025-07-08T12:00:53.743491Z node 1 :NODE_BROKER DEBUG: [Dirty] Added node #1024 host2:1001 2025-07-08T12:00:53.743496Z node 1 :NODE_BROKER DEBUG: [DB] Update epoch version in database version=6 2025-07-08T12:00:53.743504Z node 1 :NODE_BROKER DEBUG: [Dirty] Update current epoch version from 5 to 6 2025-07-08T12:00:53.743592Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:731:2270], Recipient [1:554:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:53.743612Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:543:2180], Recipient [1:554:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-07-08T12:00:53.743616Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:53.743628Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #3.4 1970-01-01T02:00:00.025000Z - 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z 2025-07-08T12:00:53.743694Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:733:2272], Recipient [1:554:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:53.743708Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:543:2180], Recipient [1:554:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-07-08T12:00:53.743712Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-07-08T12:00:53.743723Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: WRONG_REQUEST Reason: "Unknown node" } } ... 
unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR 2025-07-08T12:00:53.744467Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2025-07-08T12:00:53.744485Z node 1 :NODE_BROKER DEBUG: [Committed] Remove node #1024 host1:1001 2025-07-08T12:00:53.744494Z node 1 :NODE_BROKER DEBUG: [Committed] Move to new epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-07-08T12:00:53.744513Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T04:00:00.025000Z 2025-07-08T12:00:53.744517Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #4 nodes=0 expired=0 2025-07-08T12:00:53.744527Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-07-08T12:00:53.744534Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-07-08T12:00:53.744544Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-07-08T12:00:53.744551Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-07-08T12:00:53.744559Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-07-08T12:00:53.744566Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-07-08T12:00:53.744573Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-07-08T12:00:53.744580Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.5 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-07-08T12:00:53.755722Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2025-07-08T12:00:53.755746Z node 1 :NODE_BROKER DEBUG: [Committed] Added node #1024 host2:1001 2025-07-08T12:00:53.755755Z node 1 :NODE_BROKER DEBUG: [Committed] Update current epoch version from 5 to 6 2025-07-08T12:00:53.755760Z node 1 :NODE_BROKER DEBUG: Add node #1024 host2:1001 to epoch cache 2025-07-08T12:00:53.755818Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1024 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 18000025000 Name: "slot-0" } 2025-07-08T12:00:53.755948Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:740:2279], Recipient [1:554:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:53.755974Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:543:2180], Recipient [1:554:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-07-08T12:00:53.755979Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:53.755990Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.6 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-07-08T12:00:53.756070Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:742:2281], Recipient [1:554:2186]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:53.756084Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:543:2180], Recipient [1:554:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-07-08T12:00:53.756088Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:53.756093Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.6 1970-01-01T03:00:00.025000Z - 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z 2025-07-08T12:00:53.756147Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:744:2283], Recipient [1:554:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:53.756164Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039937, Sender [1:543:2180], Recipient [1:554:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode { NodeId: 1024 } 2025-07-08T12:00:53.756169Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvResolveNode 2025-07-08T12:00:53.756186Z node 1 :NODE_BROKER TRACE: Send TEvResolvedNode: NKikimr::NNodeBroker::TEvNodeBroker::TEvResolvedNode { Status { Code: OK } Node { NodeId: 1024 Host: "host2" Port: 1001 ResolveHost: "host2.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } Expire: 18000025000 Name: "slot-0" } } >> YdbTableSplit::SplitByLoadWithReads >> TSchemeShardTest::AlterIndexTableDirectly [GOOD] >> AsyncIndexChangeCollector::UpsertSingleRow [GOOD] >> AsyncIndexChangeCollector::UpsertManyRows ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameExpiration [GOOD] Test command err: 2025-07-08T12:00:52.535365Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.549303Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.549365Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.549391Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.549415Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.549471Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.549503Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.549542Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.559223Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.559249Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.559288Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.559317Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.559334Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { 
NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.559346Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.559360Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.565672Z node 1 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.566574Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.566958Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.567508Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.567580Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.567620Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.567650Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.567696Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.567996Z node 8 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.568055Z node 2 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.568184Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.568221Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.568295Z node 4 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.568332Z node 6 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.568429Z node 5 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.568524Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.568561Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.568583Z node 3 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.568622Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 
Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.568777Z node 7 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.568832Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.568927Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.568974Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.569013Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.569039Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.569118Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.569262Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.569341Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.569594Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.570023Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.570830Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.598421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:00:52.598441Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:52.601943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 2025-07-08T12:00:52.606380Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T12:00:52.606850Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T12:00:52.606909Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2025-07-08T12:00:52.607072Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete FAKE_COORDINATOR: Erasing txId 101 2025-07-08T12:00:52.607821Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2025-07-08T12:00:52.607842Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2025-07-08T12:00:52.607880Z node 1 :NODE_BROKER DEBUG: [DB] Using default config. 
2025-07-08T12:00:52.607893Z node 1 :NODE_BROKER DEBUG: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-07-08T12:00:52.607899Z node 1 :NODE_BROKER DEBUG: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-07-08T12:00:52.640932Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2025-07-08T12:00:52.640993Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.026000Z 2025-07-08T12:00:52.640999Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2025-07-08T12:00:52.641071Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:602:2227], Recipient [1:556:2189]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:52.641424Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:599:2225], Recipient [1:556:2189]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-07-08T12:00:52.641436Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:52.641448Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.026000Z - 1970-01-01T01:00:00.026000Z - 1970-01-01T02:00:00.026000Z 2025-07-08T12:00:52.641552Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:615:2232], Recipient [1:556:2189]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:52.641589Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:599:2225], Recipient [1:556:2189]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database" } 2025-07-08T12:00:52.641595Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-07-08T12:00:52.641604Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 19001 ResolveHost: "host1" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database" 2025-07-08T12:00:52.641652Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:23:2070], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/my-database TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-07-08T12:00:52.641666Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:23:2070], path# /dc-1/my-database, domainOwnerId# 72057594046678944 2025-07-08T12:00:52.646378Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:23:2070], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1/my-database PathId: [OwnerId: 72057594046678944, LocalPathId: 2] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1/my-database" PathDescription { Self { Name: "my-database" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Ve ... 
sing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:54.226512Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 2025-07-08T12:00:54.226517Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [3:81:2072], Recipient [1:670:2254] 2025-07-08T12:00:54.226520Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:54.226523Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 2025-07-08T12:00:54.226529Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [2:53:2072], Recipient [1:675:2259] 2025-07-08T12:00:54.226532Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:54.226534Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #4 2025-07-08T12:00:54.237567Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2025-07-08T12:00:54.237596Z node 1 :NODE_BROKER DEBUG: [Committed] Remove node #1025 host2:19001 2025-07-08T12:00:54.237607Z node 1 :NODE_BROKER DEBUG: [Committed] Move to new epoch #4.8 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2025-07-08T12:00:54.237625Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T04:00:00.026000Z 2025-07-08T12:00:54.237631Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #4 nodes=3 expired=0 2025-07-08T12:00:54.237660Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2025-07-08T12:00:54.237670Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2025-07-08T12:00:54.237679Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2025-07-08T12:00:54.237686Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2025-07-08T12:00:54.237693Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2025-07-08T12:00:54.237703Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2025-07-08T12:00:54.237711Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2025-07-08T12:00:54.237719Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2025-07-08T12:00:54.258814Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:772:2314], Recipient [1:556:2189]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:54.258855Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:599:2225], Recipient [1:556:2189]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-07-08T12:00:54.258860Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:54.258874Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #4.8 1970-01-01T03:00:00.026000Z - 1970-01-01T04:00:00.026000Z - 1970-01-01T05:00:00.026000Z 2025-07-08T12:00:54.258957Z node 1 :NODE_BROKER TRACE: StateWork, received event# 
269877761, Sender [1:774:2316], Recipient [1:556:2189]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:54.259001Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:599:2225], Recipient [1:556:2189]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host5" Port: 19001 ResolveHost: "host5" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database" } 2025-07-08T12:00:54.259005Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-07-08T12:00:54.259011Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host5" Port: 19001 ResolveHost: "host5" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database" 2025-07-08T12:00:54.259053Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:23:2070], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/my-database TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-07-08T12:00:54.259070Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:23:2070], path# /dc-1/my-database, domainOwnerId# 72057594046678944 2025-07-08T12:00:54.259395Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:23:2070], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1/my-database PathId: [OwnerId: 72057594046678944, LocalPathId: 2] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1/my-database" PathDescription { Self { Name: "my-database" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944 } 2025-07-08T12:00:54.259440Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:23:2070], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1/my-database PathId: [OwnerId: 72057594046678944, LocalPathId: 2] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1/my-database" PathDescription { Self { Name: "my-database" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944 }, by path# { Subscriber: { Subscriber: [1:776:2317] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-07-08T12:00:54.259479Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:23:2070], cacheItem# { Subscriber: { Subscriber: [1:776:2317] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 5000001 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/my-database TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-07-08T12:00:54.259539Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:783:2318], recipient# [1:775:2189], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-07-08T12:00:54.259555Z node 1 :NODE_BROKER TRACE: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: response# { Path: dc-1/my-database TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-07-08T12:00:54.259567Z node 1 :NODE_BROKER TRACE: Finished resolving tenant: request# Host: "host5" Port: 19001 ResolveHost: "host5" Address: "" Location { } FixedNodeId: false Path: "/dc-1/my-database": scope id# <72057594046678944:2>: serviced subdomain# 72057594046678944:2 2025-07-08T12:00:54.259581Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435073, Sender [1:775:2189], Recipient [1:556:2189]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvResolvedRegistrationRequest 2025-07-08T12:00:54.259586Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvResolvedRegistrationRequest 2025-07-08T12:00:54.259609Z node 1 :NODE_BROKER DEBUG: 
TTxRegisterNode Execute 2025-07-08T12:00:54.259613Z node 1 :NODE_BROKER DEBUG: Registration request from host5:19001 (not fixed) tenant: /dc-1/my-database 2025-07-08T12:00:54.259640Z node 1 :NODE_BROKER DEBUG: [DB] Adding node #1025 host5:19001 to database resolvehost=host5 address= dc= location= lease=1 expire=Thu, 01 Jan 1970 05:00:00 UTC servicedsubdomain=72057594046678944:2 slotindex=1 authorizedbycertificate=false 2025-07-08T12:00:54.259683Z node 1 :NODE_BROKER DEBUG: [Dirty] Added node #1025 host5:19001 2025-07-08T12:00:54.259689Z node 1 :NODE_BROKER DEBUG: [DB] Update epoch version in database version=9 2025-07-08T12:00:54.259696Z node 1 :NODE_BROKER DEBUG: [Dirty] Update current epoch version from 8 to 9 2025-07-08T12:00:54.270811Z node 1 :NODE_BROKER DEBUG: TTxRegisterNode Complete 2025-07-08T12:00:54.270838Z node 1 :NODE_BROKER DEBUG: [Committed] Added node #1025 host5:19001 2025-07-08T12:00:54.270846Z node 1 :NODE_BROKER DEBUG: [Committed] Update current epoch version from 8 to 9 2025-07-08T12:00:54.270850Z node 1 :NODE_BROKER DEBUG: Add node #1025 host5:19001 to epoch cache 2025-07-08T12:00:54.270899Z node 1 :NODE_BROKER TRACE: TTxRegisterNode reply with: Status { Code: OK } Node { NodeId: 1025 Host: "host5" Port: 19001 ResolveHost: "host5" Address: "" Location { } Expire: 18000026000 Name: "slot-1" } >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData >> TPersqueueControlPlaneTestSuite::SetupReadLockSessionWithDatabase [GOOD] >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase |68.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> TxUsage::The_Transaction_Starts_On_One_Version_And_Ends_On_The_Other [GOOD] >> TPersqueueControlPlaneTestSuite::TestAddRemoveReadRule [GOOD] >> TPersqueueDataPlaneTestSuite::WriteSession ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/ut/unittest >> TNodeBrokerTest::BasicFunctionality [GOOD] Test command err: 2025-07-08T12:00:52.178443Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.188615Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.188698Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.188729Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.188753Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.188794Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.188828Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.188868Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.193686Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.193715Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.193732Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 
Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.193750Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.193764Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.193785Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.193890Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 1 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.198878Z node 1 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.200811Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.201914Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.201977Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.202014Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.202044Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.202075Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.202122Z node 1 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.202452Z node 5 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.202502Z node 6 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.202692Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 5 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.202711Z node 7 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.202805Z node 2 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.202833Z node 4 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.202908Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 4 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.203000Z node 3 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.203025Z node 4 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 
18446744073709.551615s } 2025-07-08T12:00:52.203156Z node 8 :NAMESERVICE DEBUG: Handle NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936129 Status: ERROR ServerId: [0:0:0] Leader: 1 Dead: 0 Generation: 0 VersionInfo: } 2025-07-08T12:00:52.203233Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.203287Z node 5 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.203352Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 6 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.203951Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.203972Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.204295Z node 6 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 7 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.204763Z node 7 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.205193Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 3 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.205578Z node 3 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.207721Z node 8 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 2 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.207880Z node 2 :NAMESERVICE DEBUG: Handle NActors::TEvInterconnect::TEvGetNode { NodeId: 8 Deadline: 18446744073709.551615s } 2025-07-08T12:00:52.231158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:00:52.231177Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:52.235200Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060000 event: NKikimr::TEvTablet::TEvBoot 2025-07-08T12:00:52.235576Z node 1 :NODE_BROKER DEBUG: StateInit event type: 10060001 event: NKikimr::TEvTablet::TEvRestored 2025-07-08T12:00:52.235666Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Execute 2025-07-08T12:00:52.235832Z node 1 :NODE_BROKER DEBUG: StateInit event type: 1006000c event: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T12:00:52.236530Z node 1 :NODE_BROKER DEBUG: TTxInitScheme Complete 2025-07-08T12:00:52.236666Z node 1 :NODE_BROKER DEBUG: TTxLoadState Execute 2025-07-08T12:00:52.236705Z node 1 :NODE_BROKER DEBUG: [DB] Using default config. 
2025-07-08T12:00:52.236717Z node 1 :NODE_BROKER DEBUG: [DB] Starting the first epoch: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-07-08T12:00:52.236723Z node 1 :NODE_BROKER DEBUG: [DB] Update epoch in database: #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-07-08T12:00:52.268781Z node 1 :NODE_BROKER DEBUG: TTxLoadState Complete 2025-07-08T12:00:52.268820Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T01:00:00.025000Z 2025-07-08T12:00:52.268827Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #1 nodes=0 expired=0 2025-07-08T12:00:52.279200Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:588:2207], Recipient [1:552:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:52.279598Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:541:2180], Recipient [1:552:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-07-08T12:00:52.279609Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:52.279622Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #1.1 1970-01-01T00:00:00.025000Z - 1970-01-01T01:00:00.025000Z - 1970-01-01T02:00:00.025000Z 2025-07-08T12:00:52.279727Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:590:2209], Recipient [1:552:2186]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:52.279762Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039938, Sender [1:541:2180], Recipient [1:552:2186]: NKikimr::NNodeBroker::TEvNodeBroker::TEvRegistrationRequest { Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" } 2025-07-08T12:00:52.279768Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvRegistrationRequest 2025-07-08T12:00:52.279778Z node 1 :NODE_BROKER TRACE: Handle TEvNodeBroker::TEvRegistrationRequest: request# Host: "host1" Port: 1001 ResolveHost: "host1.yandex.net" Address: "1.2.3.4" Location { DataCenter: "1" Module: "2" Rack: "3" Unit: "4" } FixedNodeId: false Path: "dc-1" 2025-07-08T12:00:52.279833Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:23:2070], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-07-08T12:00:52.279851Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:23:2070], path# /dc-1, domainOwnerId# 72057594046678944 2025-07-08T12:00:52.286982Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:23:2070], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 
SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 ... e 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:54.137534Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #5 2025-07-08T12:00:54.153456Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2025-07-08T12:00:54.153487Z node 1 :NODE_BROKER DEBUG: [Committed] Node #1026 host3:1001 has expired 2025-07-08T12:00:54.153496Z node 1 :NODE_BROKER DEBUG: [Committed] Node #1027 host1:1001 has expired 2025-07-08T12:00:54.153504Z node 1 :NODE_BROKER DEBUG: [Committed] Move to new epoch #5.10 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2025-07-08T12:00:54.153531Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T05:00:00.025000Z 2025-07-08T12:00:54.153537Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #5 nodes=2 expired=2 2025-07-08T12:00:54.153582Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #5.10 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2025-07-08T12:00:54.153592Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #5.10 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2025-07-08T12:00:54.153602Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #5.10 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2025-07-08T12:00:54.153609Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #5.10 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2025-07-08T12:00:54.153619Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #5.10 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2025-07-08T12:00:54.153627Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #5.10 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2025-07-08T12:00:54.153635Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #5.10 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2025-07-08T12:00:54.153642Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #5.10 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2025-07-08T12:00:54.176454Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:867:2361], Recipient [1:678:2251]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:54.176516Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:541:2180], Recipient [1:678:2251]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-07-08T12:00:54.176524Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:54.176541Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #5.10 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 
2025-07-08T12:00:54.176639Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:869:2363], Recipient [1:678:2251]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:54.176656Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:541:2180], Recipient [1:678:2251]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-07-08T12:00:54.176660Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:54.176665Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #5.10 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2025-07-08T12:00:54.176727Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:871:2365], Recipient [1:678:2251]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:54.176737Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:541:2180], Recipient [1:678:2251]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-07-08T12:00:54.176741Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:54.176746Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #5.10 1970-01-01T04:00:00.025000Z - 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z 2025-07-08T12:00:54.386498Z node 1 :NODE_BROKER TRACE: StateWork, received event# 2146435072, Sender [1:678:2251], Recipient [1:678:2251]: NKikimr::NNodeBroker::TNodeBroker::TEvPrivate::TEvUpdateEpoch 2025-07-08T12:00:54.386522Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvPrivate::TEvUpdateEpoch 2025-07-08T12:00:54.386541Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Execute 2025-07-08T12:00:54.386548Z node 1 :NODE_BROKER DEBUG: [DB] Removing node #1026 from database 2025-07-08T12:00:54.386571Z node 1 :NODE_BROKER DEBUG: [DB] Removing node #1027 from database 2025-07-08T12:00:54.386581Z node 1 :NODE_BROKER DEBUG: [DB] Update epoch in database: #6.11 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2025-07-08T12:00:54.386600Z node 1 :NODE_BROKER DEBUG: [Dirty] Node #1024 host1:1001 has expired 2025-07-08T12:00:54.386609Z node 1 :NODE_BROKER DEBUG: [Dirty] Node #1025 host4:1001 has expired 2025-07-08T12:00:54.386616Z node 1 :NODE_BROKER DEBUG: [Dirty] Remove node #1026 host3:1001 2025-07-08T12:00:54.386622Z node 1 :NODE_BROKER DEBUG: [Dirty] Remove node #1027 host1:1001 2025-07-08T12:00:54.386630Z node 1 :NODE_BROKER DEBUG: [Dirty] Move to new epoch #6.11 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2025-07-08T12:00:54.459980Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:25:2072], Recipient [1:678:2251]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { MinEpoch: 6 } 2025-07-08T12:00:54.460009Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:54.460016Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #6 2025-07-08T12:00:54.460107Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [6:165:2072], Recipient [1:813:2326] 2025-07-08T12:00:54.460112Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:54.460116Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #6 2025-07-08T12:00:54.460136Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [7:193:2072], Recipient [1:814:2327] 2025-07-08T12:00:54.460139Z 
node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:54.460142Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #6 2025-07-08T12:00:54.460155Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [8:221:2072], Recipient [1:815:2328] 2025-07-08T12:00:54.460158Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:54.460162Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #6 2025-07-08T12:00:54.460169Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [3:81:2072], Recipient [1:810:2323] 2025-07-08T12:00:54.460172Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:54.460176Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #6 2025-07-08T12:00:54.460182Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [4:109:2072], Recipient [1:811:2324] 2025-07-08T12:00:54.460185Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:54.460189Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #6 2025-07-08T12:00:54.460196Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [5:137:2072], Recipient [1:812:2325] 2025-07-08T12:00:54.460199Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:54.460202Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #6 2025-07-08T12:00:54.460211Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [2:53:2072], Recipient [1:816:2329] 2025-07-08T12:00:54.460214Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:54.460218Z node 1 :NODE_BROKER DEBUG: Delaying list nodes request for epoch #6 2025-07-08T12:00:54.471898Z node 1 :NODE_BROKER DEBUG: TTxUpdateEpoch Complete 2025-07-08T12:00:54.471928Z node 1 :NODE_BROKER DEBUG: [Committed] Node #1024 host1:1001 has expired 2025-07-08T12:00:54.471936Z node 1 :NODE_BROKER DEBUG: [Committed] Node #1025 host4:1001 has expired 2025-07-08T12:00:54.471942Z node 1 :NODE_BROKER DEBUG: [Committed] Remove node #1026 host3:1001 2025-07-08T12:00:54.471947Z node 1 :NODE_BROKER DEBUG: [Committed] Remove node #1027 host1:1001 2025-07-08T12:00:54.471955Z node 1 :NODE_BROKER DEBUG: [Committed] Move to new epoch #6.11 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2025-07-08T12:00:54.471973Z node 1 :NODE_BROKER TRACE: Scheduled epoch update at 1970-01-01T06:00:00.025000Z 2025-07-08T12:00:54.471979Z node 1 :NODE_BROKER DEBUG: Preparing nodes list cache for epoch #6 nodes=0 expired=2 2025-07-08T12:00:54.472010Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #6.11 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2025-07-08T12:00:54.472018Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #6.11 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2025-07-08T12:00:54.472026Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #6.11 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2025-07-08T12:00:54.472032Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #6.11 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2025-07-08T12:00:54.472038Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo 
for epoch #6.11 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2025-07-08T12:00:54.472044Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #6.11 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2025-07-08T12:00:54.472051Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #6.11 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2025-07-08T12:00:54.472059Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #6.11 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2025-07-08T12:00:54.493004Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:894:2376], Recipient [1:678:2251]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:54.493061Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:541:2180], Recipient [1:678:2251]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-07-08T12:00:54.493069Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:54.493084Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #6.11 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z 2025-07-08T12:00:54.493159Z node 1 :NODE_BROKER TRACE: StateWork, received event# 269877761, Sender [1:896:2378], Recipient [1:678:2251]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:54.493170Z node 1 :NODE_BROKER TRACE: StateWork, received event# 272039936, Sender [1:541:2180], Recipient [1:678:2251]: NKikimr::NNodeBroker::TEvNodeBroker::TEvListNodes { } 2025-07-08T12:00:54.493172Z node 1 :NODE_BROKER TRACE: StateWork, processing event TEvNodeBroker::TEvListNodes 2025-07-08T12:00:54.493175Z node 1 :NODE_BROKER TRACE: Send TEvNodesInfo for epoch #6.11 1970-01-01T05:00:00.025000Z - 1970-01-01T06:00:00.025000Z - 1970-01-01T07:00:00.025000Z |68.5%| [TA] $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest [GOOD] >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::AlterIndexTableDirectly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T12:00:04.865864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:00:04.865903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:04.865909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:00:04.865914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:00:04.865929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:00:04.865933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:04.865942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:04.865953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:00:04.866035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:04.878031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:00:04.878057Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:04.881240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:04.881286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:04.881314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:00:04.882781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:04.882850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:04.882936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:04.883100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:00:04.883854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:04.883903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] 
Stop 2025-07-08T12:00:04.884140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:04.884162Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:04.884179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:04.884185Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:04.884190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:00:04.884215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:00:04.885313Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T12:00:04.901850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:04.901903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:04.901944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:00:04.901979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:04.901986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:04.902406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:04.902422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:00:04.902444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:04.902449Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:00:04.902452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:00:04.902455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:00:04.902771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:04.902781Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:04.902786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:00:04.903084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-07-08T12:00:04.903095Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:04.903100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:04.903107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:00:04.903642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:04.903986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:00:04.904026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:00:04.904193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:04.904209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:04.904217Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:04.904280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:00:04.904287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:04.904312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:04.904323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:00:04.904590Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:04.904595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:04.904632Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:04.904636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:00:04.904646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:04.904652Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 
2025-07-08T12:00:04.904661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:04.904664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:04.904668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:04.904671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:04.904676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:00:04.904681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:04.904686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:00:04.904690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:00:04.904701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:04.904706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:00:04.904710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:00:04.905090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:00:04.905102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
rtitioningPolicy { SizeToSplit: 100500 MinPartitionsCount: 1 FastSplitSettings { SizeThreshold: 100500 RowCountThreshold: 100500 } } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409552 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1592 DataSize: 1592 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:55.682008Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/table/indexByValue" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-07-08T12:00:55.682034Z node 16 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/table/indexByValue" took 27us result status StatusSuccess 2025-07-08T12:00:55.682154Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/table/indexByValue" PathDescription { Self { Name: "indexByValue" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 3 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } 
DiskSpaceUsage { Tables { TotalSize: 1592 DataSize: 1592 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "indexByValue" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 3 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 100500 MinPartitionsCount: 1 FastSplitSettings { SizeThreshold: 100500 RowCountThreshold: 100500 } } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:55.682245Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/table/indexByValue/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-07-08T12:00:55.682262Z node 16 
:SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/table/indexByValue/indexImplTable" took 19us result status StatusSuccess 2025-07-08T12:00:55.682348Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/table/indexByValue/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 4 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 100500 MinPartitionsCount: 1 FastSplitSettings { SizeThreshold: 100500 RowCountThreshold: 100500 } } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TablePartitions { 
EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409552 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1592 DataSize: 1592 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TxUsage::TestRetentionOnLongTxAndBigMessages >> Initializer::Simple [GOOD] |68.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart+UseSink >> AsyncIndexChangeCollector::UpsertManyRows [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit >> DataShardSnapshots::VolatileSnapshotTimeout [GOOD] >> DataShardSnapshots::VolatileSnapshotTimeoutRefresh >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/initializer/ut/unittest >> Initializer::Simple [GOOD] Test command err: 2025-07-08T11:59:52.412879Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00139a/r3tmp/tmpTkaObi/pdisk_1.dat TServer::EnableGrpc on GrpcPort 16800, node 1 TClient is connected to server localhost:7268 2025-07-08T11:59:52.615822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T11:59:52.639003Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T11:59:52.639946Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T11:59:52.639959Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T11:59:52.639963Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T11:59:52.640040Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T11:59:52.681521Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:59:52.681559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:59:52.692254Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:02.874850Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:615:2518], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T12:00:02.875519Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDg1ZDE0NWEtYjE1ODYxNzktNTUzOTUyZjctODEyOTc0Y2Y=, ActorId: [1:611:2515], ActorState: ExecuteState, TraceId: 01jzmygh81ez919k7kz6tchp2s, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: REQUEST=SELECT * FROM `/Root/.metadata/test`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 2025-07-08T12:00:02.970100Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; REQUEST=SELECT * FROM `/Root/.metadata/test`;EXPECTATION=0 2025-07-08T12:00:03.090581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480 2025-07-08T12:00:03.315027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T12:00:03.402162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:00:03.810252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715666:0, at schemeshard: 72057594046644480 Initialization finished 2025-07-08T12:00:14.191110Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jzmygwb1fp097xvhwgtkgwj0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDAwZDM3ZDktYjc1Y2VlMTgtZDViZjA3NDEtZGZmMzgzNjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root REQUEST=SELECT * FROM `/Root/.metadata/test`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/test`;EXPECTATION=1 REQUEST=DROP TABLE `/Root/.metadata/test`;EXPECTATION=0;WAITING=1 2025-07-08T12:00:24.613205Z node 1 :TX_PROXY ERROR: Actor# [1:1155:2923] txid# 281474976715674, Access denied for root@builtin on path /Root/.metadata/test, with access RemoveSchema 2025-07-08T12:00:24.613338Z node 1 :TX_PROXY ERROR: Actor# [1:1155:2923] txid# 281474976715674, issues: { message: "Access denied for root@builtin on path /Root/.metadata/test" issue_code: 200000 severity: 1 } REQUEST=DROP TABLE `/Root/.metadata/test`;RESULT=
: Error: Execution, code: 1060
:1:12: Error: Executing DROP TABLE
: Error: Access denied., code: 2018
: Error: Access denied for root@builtin on path /Root/.metadata/test, code: 200000 ;EXPECTATION=0 FINISHED_REQUEST=DROP TABLE `/Root/.metadata/test`;EXPECTATION=0;WAITING=1 2025-07-08T12:00:35.435677Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jzmyhh2472snw7rafj9myg7m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzA2MDE3MjUtMmE4MjAyYzUtODNmMmM3LWI3YzUyOGJj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;RESULT=
: Fatal: ydb/core/kqp/host/kqp_host.cpp:977 ExecuteDataQuery(): requirement false failed, message: Unexpected query type for execute script action: Ddl, code: 1 ;EXPECTATION=0 FINISHED_REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 2025-07-08T12:00:56.124764Z node 1 :TX_PROXY ERROR: Actor# [1:1350:3068] txid# 281474976715682, Access denied for root@builtin on path /Root/.metadata/initialization/migrations, with access RemoveSchema 2025-07-08T12:00:56.124824Z node 1 :TX_PROXY ERROR: Actor# [1:1350:3068] txid# 281474976715682, issues: { message: "Access denied for root@builtin on path /Root/.metadata/initialization/migrations" issue_code: 200000 severity: 1 } REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;RESULT=
: Error: Execution, code: 1060
:1:12: Error: Executing DROP TABLE
: Error: Access denied., code: 2018
: Error: Access denied for root@builtin on path /Root/.metadata/initialization/migrations, code: 200000 ;EXPECTATION=0 FINISHED_REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 |68.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> Secret::DeactivatedQueryService [GOOD] >> DataShardSnapshots::RepeatableReadAfterSplitRace [GOOD] >> DataShardSnapshots::PostMergeNotCompactedTooEarly |68.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> TPersqueueDataPlaneTestSuite::WriteSession [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase [GOOD] Test command err: === Server->StartServer(false); 2025-07-08T12:00:55.103380Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679860063316555:2149];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:55.104039Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0016ee/r3tmp/tmpjj5G1L/pdisk_1.dat 2025-07-08T12:00:55.132928Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T12:00:55.134637Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T12:00:55.135964Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:00:55.161972Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3133, node 1 2025-07-08T12:00:55.177210Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/43nv/0016ee/r3tmp/yandextCBEN5.tmp 2025-07-08T12:00:55.177220Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/43nv/0016ee/r3tmp/yandextCBEN5.tmp 2025-07-08T12:00:55.179794Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/43nv/0016ee/r3tmp/yandextCBEN5.tmp 2025-07-08T12:00:55.179838Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:00:55.189111Z INFO: TTestServer started on Port 20763 GrpcPort 3133 TClient is connected to server localhost:20763 PQClient connected to localhost:3133 === TenantModeEnabled() = 1 === Init PQ - start server on port 3133 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:55.228587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:55.228608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:55.233202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:55.233221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:55.233490Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:55.239373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-07-08T12:00:55.239417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.239465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-07-08T12:00:55.239504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-07-08T12:00:55.239511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.240078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-07-08T12:00:55.240106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-07-08T12:00:55.240107Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-07-08T12:00:55.240147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.240154Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 
2025-07-08T12:00:55.240156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2025-07-08T12:00:55.240159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 2 -> 3 waiting... 2025-07-08T12:00:55.240620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:55.240744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.240748Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2025-07-08T12:00:55.240751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 3 -> 128 2025-07-08T12:00:55.241093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.241101Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.241104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet# 72057594046644480 2025-07-08T12:00:55.241108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2025-07-08T12:00:55.241709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:55.242142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2025-07-08T12:00:55.242176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2025-07-08T12:00:55.242374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2025-07-08T12:00:55.242379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2025-07-08T12:00:55.242382Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2025-07-08T12:00:55.245528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1751976055291, transactions count in step: 1, at schemeshard: 72057594046644480 2025-07-08T12:00:55.245556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1751976055291 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-07-08T12:00:55.245560Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-07-08T12:00:55.245631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 128 -> 240 2025-07-08T12:00:55.245640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-07-08T12:00:55.245665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-07-08T12:00:55.245672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-07-08T12:00:55.261274Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-07-08T12:00:55.261285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976720657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-07-08T12:00:55.261331Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-07-08T12:00:55.261334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7524679863500274642:2385], at schemeshard: 72057594046644480, txId: 281474976720657, path id: 1 2025-07-08T12:00:55.261342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.261347Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976720657:0 ProgressState 2025-07-08T12:00:55.261359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2025-07-08T12:00:55.261361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-07-08T12:00:55.261365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2025-07-08T12:00:55.261366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-07-08T12:00:55.261369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 1/1, is published: false 2025-07-08T12:00:55.261373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-07-08T12:00:55.261376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720657:0 2025-07-08T12:00:55.261377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720657:0 2025-07-08T12:00:55.261407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-07-08T12:00:55.261411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976720657, publications: 1, subscribers: 1 2025-07-08T12:00:55.261413Z ... 
6715662:0 2025-07-08T12:00:57.047122Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715662:0 2025-07-08T12:00:57.047134Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 7] was 3 2025-07-08T12:00:57.047136Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715662:1 2025-07-08T12:00:57.047137Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715662:1 2025-07-08T12:00:57.047152Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 8] was 5 2025-07-08T12:00:57.047154Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715662, publications: 2, subscribers: 1 2025-07-08T12:00:57.047156Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715662, [OwnerId: 72057594046644480, LocalPathId: 7], 5 2025-07-08T12:00:57.047157Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715662, [OwnerId: 72057594046644480, LocalPathId: 8], 2 2025-07-08T12:00:57.047458Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 7 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715662 2025-07-08T12:00:57.047469Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 7 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715662 2025-07-08T12:00:57.047472Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715662 2025-07-08T12:00:57.047475Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715662, pathId: [OwnerId: 72057594046644480, LocalPathId: 7], version: 5 2025-07-08T12:00:57.047478Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 7] was 2 2025-07-08T12:00:57.047515Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 8 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715662 2025-07-08T12:00:57.047520Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 8 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715662 2025-07-08T12:00:57.047521Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715662 2025-07-08T12:00:57.047523Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715662, pathId: [OwnerId: 72057594046644480, LocalPathId: 8], version: 2 2025-07-08T12:00:57.047525Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 8] was 4 2025-07-08T12:00:57.047530Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715662, subscribers: 1 2025-07-08T12:00:57.047533Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 
72057594046644480, to actorId: [3:7524679867385966912:2314] 2025-07-08T12:00:57.048432Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715662 2025-07-08T12:00:57.048446Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715662 Create topic result: 1 === EnablePQLogs === CreateChannel === NewStub === InitializeWritePQService === InitializeWritePQService start iteration === InitializeWritePQService create streamingWriter === InitializeWritePQService Write 2025-07-08T12:00:57.155979Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-07-08T12:00:57.155993Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2025-07-08T12:00:57.156177Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "Root/acc/topic1" message_group_id: "12345678" } 2025-07-08T12:00:57.156197Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "Root/acc/topic1" message_group_id: "12345678" from ipv6:[::1]:56872 2025-07-08T12:00:57.156202Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:56872 proto=v1 topic=Root/acc/topic1 durationSec=0 2025-07-08T12:00:57.156205Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-07-08T12:00:57.157148Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-07-08T12:00:57.157182Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-07-08T12:00:57.157183Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-07-08T12:00:57.157185Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-07-08T12:00:57.157194Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524679871680934473:2326] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-07-08T12:00:57.157199Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-07-08T12:00:57.157584Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-07-08T12:00:57.157690Z node 3 :PERSQUEUE INFO: new Cookie 12345678|c25edde-b925ad4d-73064387-435097c8_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2025-07-08T12:00:57.157831Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: 12345678|c25edde-b925ad4d-73064387-435097c8_0 2025-07-08T12:00:57.158207Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: 12345678|c25edde-b925ad4d-73064387-435097c8_0 grpc read done: success: 0 data: 2025-07-08T12:00:57.158212Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|c25edde-b925ad4d-73064387-435097c8_0 grpc read failed 2025-07-08T12:00:57.158253Z node 3 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: 12345678|c25edde-b925ad4d-73064387-435097c8_0 2025-07-08T12:00:57.158256Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|c25edde-b925ad4d-73064387-435097c8_0 is DEAD 2025-07-08T12:00:57.158301Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison Finish: 0 === InitializeWritePQService done === PersQueueClient === InitializePQ completed 2025-07-08T12:00:57.162605Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-07-08T12:00:57.162616Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2025-07-08T12:00:57.162781Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "topic1" message_group_id: "12345678" } 2025-07-08T12:00:57.162811Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "topic1" message_group_id: "12345678" from ipv6:[::1]:56872 2025-07-08T12:00:57.162815Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:56872 proto=v1 topic=topic1 durationSec=0 2025-07-08T12:00:57.162819Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-07-08T12:00:57.163358Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2025-07-08T12:00:57.163386Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-07-08T12:00:57.163387Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-07-08T12:00:57.163389Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND 
ProducerId = $SourceId AND Partition = $Partition; 2025-07-08T12:00:57.163398Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524679871680934493:2335] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-07-08T12:00:57.163403Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-07-08T12:00:57.163646Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-07-08T12:00:57.163770Z node 3 :PERSQUEUE INFO: new Cookie 12345678|8f6a8a0c-cff2958c-85949ed2-1e6d4dd5_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2025-07-08T12:00:57.167729Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: 12345678|8f6a8a0c-cff2958c-85949ed2-1e6d4dd5_0 2025-07-08T12:00:57.175398Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: 12345678|8f6a8a0c-cff2958c-85949ed2-1e6d4dd5_0 grpc read done: success: 0 data: 2025-07-08T12:00:57.175411Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 12345678|8f6a8a0c-cff2958c-85949ed2-1e6d4dd5_0 grpc read failed 2025-07-08T12:00:57.175418Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 12345678|8f6a8a0c-cff2958c-85949ed2-1e6d4dd5_0 grpc closed 2025-07-08T12:00:57.175421Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 12345678|8f6a8a0c-cff2958c-85949ed2-1e6d4dd5_0 is DEAD 2025-07-08T12:00:57.175583Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-07-08T12:00:57.345294Z node 3 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |68.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink [GOOD] Test command err: 2025-07-08T12:00:37.294305Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001a2d/r3tmp/tmpI4GWmn/pdisk_1.dat 2025-07-08T12:00:37.427891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:00:37.443727Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:37.478116Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-07-08T12:00:37.478389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:37.486773Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:37.486889Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-07-08T12:00:37.497441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:37.571544Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Handle TEvProposeTransaction 
2025-07-08T12:00:37.571569Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] TxId# 281474976715657 ProcessProposeTransaction 2025-07-08T12:00:37.571598Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:578:2498] 2025-07-08T12:00:37.596233Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-07-08T12:00:37.596276Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-07-08T12:00:37.596473Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-07-08T12:00:37.596491Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-07-08T12:00:37.596554Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-07-08T12:00:37.596598Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-07-08T12:00:37.596613Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-07-08T12:00:37.597066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:37.597197Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 HANDLE EvClientConnected 2025-07-08T12:00:37.597318Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-07-08T12:00:37.597329Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 SEND to# [1:545:2470] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-07-08T12:00:37.610999Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:594:2513], Recipient [1:603:2519]: NKikimr::TEvTablet::TEvBoot 2025-07-08T12:00:37.611222Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:594:2513], Recipient [1:603:2519]: NKikimr::TEvTablet::TEvRestored 2025-07-08T12:00:37.611312Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:603:2519] 2025-07-08T12:00:37.611372Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:37.620501Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:594:2513], Recipient [1:603:2519]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T12:00:37.620690Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:37.620715Z node 1 :TX_DATASHARD DEBUG: 
TDataShard::TTxInit::Execute 2025-07-08T12:00:37.620876Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T12:00:37.620885Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T12:00:37.620892Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T12:00:37.642111Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:37.642183Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:37.642204Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:619:2519] in generation 1 2025-07-08T12:00:37.642302Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:37.646113Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T12:00:37.646176Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:37.646197Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:621:2529] 2025-07-08T12:00:37.646202Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:37.646207Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T12:00:37.646212Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:37.647351Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:603:2519], Recipient [1:603:2519]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-07-08T12:00:37.647374Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-07-08T12:00:37.647480Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T12:00:37.647499Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T12:00:37.647511Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:37.647518Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:37.647524Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-07-08T12:00:37.647529Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-07-08T12:00:37.647533Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-07-08T12:00:37.647538Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:37.647543Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:37.647643Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:610:2523], Recipient [1:603:2519]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:37.647650Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:37.647657Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:599:2516], serverId# [1:610:2523], sessionId# [0:0:0] 2025-07-08T12:00:37.647668Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:361:2356], Recipient [1:610:2523] 
2025-07-08T12:00:37.647672Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-07-08T12:00:37.648290Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:37.648351Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-07-08T12:00:37.648362Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T12:00:37.648378Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T12:00:37.648385Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-07-08T12:00:37.648389Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-07-08T12:00:37.648394Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-07-08T12:00:37.648398Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-07-08T12:00:37.648459Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-07-08T12:00:37.648466Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-07-08T12:00:37.648469Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-07-08T12:00:37.648472Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-07-08T12:00:37.648484Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-07-08T12:00:37.648487Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-07-08T12:00:37.648490Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-07-08T12:00:37.648493Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-07-08T12:00:37.648497Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-07-08T12:00:37.649505Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:37.649518Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-07-08T12:00:37.649523Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-07-08T12:00:37.649531Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-07-08T12:00:37.649542Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T12:00:37.649925Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:622:2530], Recipient [1:603:2519]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-07-08T12:00 ... 
037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-07-08T12:00:57.398737Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-07-08T12:00:57.398743Z node 13 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037889 dest 72075186224037888 producer 72075186224037889 txId 281474976715661 2025-07-08T12:00:57.398749Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 2500 txid# 281474976715661 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-07-08T12:00:57.398757Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-07-08T12:00:57.398767Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [13:603:2519], Recipient [13:693:2584]: {TEvReadSet step# 2500 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-07-08T12:00:57.398770Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-07-08T12:00:57.398774Z node 13 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715661 2025-07-08T12:00:57.398782Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 2500 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-07-08T12:00:57.398788Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-07-08T12:00:57.398799Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [13:693:2584], Recipient [13:603:2519]: {TEvReadSet step# 2500 txid# 281474976715661 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-07-08T12:00:57.398805Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-07-08T12:00:57.398809Z node 13 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037889 dest 72075186224037888 producer 72075186224037889 txId 281474976715661 2025-07-08T12:00:57.398813Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 2500 txid# 281474976715661 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-07-08T12:00:57.398845Z node 13 :TX_DATASHARD DEBUG: Complete [2500 : 281474976715661] from 72075186224037888 at tablet 72075186224037888 send result to client [13:817:2666], exec latency: 0 ms, propose latency: 0 ms 2025-07-08T12:00:57.398867Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [13:603:2519], Recipient [13:693:2584]: {TEvReadSet step# 2500 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-07-08T12:00:57.398870Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-07-08T12:00:57.398874Z node 13 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715661 
2025-07-08T12:00:57.398879Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 2500 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-07-08T12:00:57.398895Z node 13 :TX_DATASHARD DEBUG: Complete [2500 : 281474976715661] from 72075186224037889 at tablet 72075186224037889 send result to client [13:817:2666], exec latency: 0 ms, propose latency: 0 ms TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715661 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 284 } } ComputeActorStats { Tasks { Tables { TablePath: "/Root/table-1" WriteRows: 1 WriteBytes: 8 } } } 2025-07-08T12:00:57.398996Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:57.399024Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037889 Status: COMPLETE TxId: 281474976715661 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 271 } } ComputeActorStats { Tasks { Tables { TablePath: "/Root/table-2" WriteRows: 1 WriteBytes: 8 } } } 2025-07-08T12:00:57.399295Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:57.399960Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-07-08T12:00:57.399983Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [13:603:2519], Recipient [13:693:2584]: {TEvReadSet step# 2500 txid# 281474976715661 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-07-08T12:00:57.399988Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-07-08T12:00:57.399994Z node 13 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715661 2025-07-08T12:00:57.400069Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-07-08T12:00:57.400140Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [13:693:2584], Recipient [13:603:2519]: {TEvReadSet step# 2500 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-07-08T12:00:57.400144Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-07-08T12:00:57.400148Z node 13 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715661 2025-07-08T12:00:57.419553Z node 13 :TX_PROXY DEBUG: actor# [13:60:2107] Handle TEvExecuteKqpTransaction 2025-07-08T12:00:57.419643Z node 13 :TX_PROXY DEBUG: actor# [13:60:2107] TxId# 281474976715665 ProcessProposeKqpTransaction 2025-07-08T12:00:57.419846Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jzmyj6hr10qf6ebbhze0vvvj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YjM3MDA5YzItNmY3N2JlY2EtZGI0ZmIyNzAtNWFmMTQ0OWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 2025-07-08T12:00:57.420428Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:941:2786], Recipient [13:603:2519]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-07-08T12:00:57.420459Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-07-08T12:00:57.420469Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v3000/18446744073709551615 ImmediateWriteEdge# v3500/18446744073709551615 ImmediateWriteEdgeReplied# v3500/18446744073709551615 2025-07-08T12:00:57.420477Z node 13 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v3500/18446744073709551615 2025-07-08T12:00:57.420487Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-07-08T12:00:57.420508Z node 13 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-07-08T12:00:57.420513Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-07-08T12:00:57.420518Z node 13 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-07-08T12:00:57.420522Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-07-08T12:00:57.420536Z node 13 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2025-07-08T12:00:57.420541Z node 13 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-07-08T12:00:57.420545Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-07-08T12:00:57.420549Z node 13 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-07-08T12:00:57.420553Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-07-08T12:00:57.420567Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-07-08T12:00:57.420643Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[13:941:2786], 0} after executionsCount# 1 2025-07-08T12:00:57.420651Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:941:2786], 0} sends rowCount# 2, bytes# 96, quota rows left# 999, quota bytes left# 5242784, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-07-08T12:00:57.420665Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:941:2786], 0} finished in read 2025-07-08T12:00:57.420674Z node 13 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-07-08T12:00:57.420677Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-07-08T12:00:57.420681Z 
node 13 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-07-08T12:00:57.420685Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-07-08T12:00:57.420696Z node 13 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-07-08T12:00:57.420699Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-07-08T12:00:57.420703Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-07-08T12:00:57.420708Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-07-08T12:00:57.420727Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-07-08T12:00:57.420899Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [13:941:2786], Recipient [13:603:2519]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-07-08T12:00:57.420906Z node 13 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 2 } items { uint32_value: 22 } } >> Secret::Deactivated [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> Secret::DeactivatedQueryService [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000fb1/r3tmp/tmpp3bh7R/pdisk_1.dat TServer::EnableGrpc on GrpcPort 26347, node 1 TClient is connected to server localhost:14360 2025-07-08T12:00:45.835997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:00:45.852606Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:45.853531Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:45.853547Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:45.853551Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:45.853628Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:00:45.889481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:45.889513Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:45.901558Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:46.107864Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-07-08T12:00:57.607944Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:618:2528], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled 2025-07-08T12:00:57.608416Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTFjZjBjYi05ZDc1ZmQyMy1lMTM1OTRkMC0zNDRlYjU0Mg==, ActorId: [1:616:2526], ActorState: ExecuteState, TraceId: 01jzmyj6qd9rhhy08z9zq3g50c, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersqueueDataPlaneTestSuite::WriteSession [GOOD] Test command err: === Server->StartServer(false); 2025-07-08T12:00:55.137383Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679860546579982:2162];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:55.137463Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:00:55.143427Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679859834464312:2230];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:55.143446Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0016e8/r3tmp/tmp24K0dJ/pdisk_1.dat 2025-07-08T12:00:55.167391Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T12:00:55.173136Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T12:00:55.196277Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29409, node 1 2025-07-08T12:00:55.221822Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/43nv/0016e8/r3tmp/yandex4qAqQO.tmp 2025-07-08T12:00:55.221834Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/43nv/0016e8/r3tmp/yandex4qAqQO.tmp 2025-07-08T12:00:55.225216Z INFO: TTestServer started on Port 10135 GrpcPort 29409 TClient is connected to server localhost:10135 2025-07-08T12:00:55.236515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:55.236541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting PQClient connected to localhost:29409 === TenantModeEnabled() = 1 === Init PQ - start server on port 29409 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-07-08T12:00:55.241782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-07-08T12:00:55.269451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:55.269482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:55.273339Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-07-08T12:00:55.277080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:55.281980Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/43nv/0016e8/r3tmp/yandex4qAqQO.tmp 2025-07-08T12:00:55.282122Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:00:55.286134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-07-08T12:00:55.286182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.286237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-07-08T12:00:55.286291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-07-08T12:00:55.286303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:00:55.289334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-07-08T12:00:55.289365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-07-08T12:00:55.289409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.289417Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-07-08T12:00:55.289419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2025-07-08T12:00:55.289423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 2 -> 3 2025-07-08T12:00:55.290274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2025-07-08T12:00:55.290282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2025-07-08T12:00:55.290285Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2025-07-08T12:00:55.293299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.293313Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2025-07-08T12:00:55.293317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 3 -> 128 2025-07-08T12:00:55.293765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.293776Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.293780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet# 72057594046644480 2025-07-08T12:00:55.293784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2025-07-08T12:00:55.294465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:55.294780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2025-07-08T12:00:55.294810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2025-07-08T12:00:55.295239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1751976055340, transactions count in step: 1, at schemeshard: 72057594046644480 2025-07-08T12:00:55.295266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1751976055340 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-07-08T12:00:55.295272Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-07-08T12:00:55.295321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 128 -> 240 2025-07-08T12:00:55.295330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-07-08T12:00:55.295355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-07-08T12:00:55.295387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-07-08T12:00:55.295692Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-07-08T12:00:55.295701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976720657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-07-08T12:00:55.295735Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-07-08T12:00:55.295742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7524679860546580435:2383], at schemeshard: 72057594046644480, txId: 281474976720657, path id: 1 2025-07-08T12:00:55.295748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.295751Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976720657:0 ProgressState 2025-07-08T12:00:55.295766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2025-07-08T12:00:55.295772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-07-08T12:00:55.295775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2025-07-08T12:00:55.295777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-07-08T12:00:55.295779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 1/1, is published: false 2025-07-08T12:00:55.295782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-07-08T12:00:55.295785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720657:0 2025-07-08T12:00:55.295788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720657:0 2025-07-08T12:00:55.295796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 720575940 ... INFO: [/Root] [/Root] [91dde2d4-ead844b1-6a2af923-f96d5795] [null] Closing session to cluster: SessionClosed { Status: BAD_REQUEST Issues: "
: Error: no read rule provided for consumer 'non_existing' in topic '/Root/account1/write_topic' in current cluster '', code: 500032 " } 2025-07-08T12:00:57.858458Z :NOTICE: [/Root] [/Root] [91dde2d4-ead844b1-6a2af923-f96d5795] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-07-08T12:00:57.858465Z :DEBUG: [/Root] [/Root] [91dde2d4-ead844b1-6a2af923-f96d5795] [null] Abort session to cluster Got new read session event: SessionClosed { Status: BAD_REQUEST Issues: "
: Error: no read rule provided for consumer 'non_existing' in topic '/Root/account1/write_topic' in current cluster '', code: 500032 " } 2025-07-08T12:00:57.858479Z :INFO: [/Root] [/Root] [91dde2d4-ead844b1-6a2af923-f96d5795] Closing read session. Close timeout: 0.000000s 2025-07-08T12:00:57.858488Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-07-08T12:00:57.858497Z :INFO: [/Root] [/Root] [91dde2d4-ead844b1-6a2af923-f96d5795] Counters: { Errors: 1 CurrentSessionLifetimeMs: 13 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-07-08T12:00:57.858504Z :NOTICE: [/Root] [/Root] [91dde2d4-ead844b1-6a2af923-f96d5795] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-07-08T12:00:57.858540Z :INFO: [/Root] [/Root] [365ce65b-20bc7a22-d212ab05-3a751a1f] Starting read session 2025-07-08T12:00:57.858548Z :DEBUG: [/Root] [/Root] [365ce65b-20bc7a22-d212ab05-3a751a1f] Starting session to cluster null (localhost:29148) 2025-07-08T12:00:57.858575Z :DEBUG: [/Root] [/Root] [365ce65b-20bc7a22-d212ab05-3a751a1f] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-07-08T12:00:57.858578Z :DEBUG: [/Root] [/Root] [365ce65b-20bc7a22-d212ab05-3a751a1f] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-07-08T12:00:57.858581Z :DEBUG: [/Root] [/Root] [365ce65b-20bc7a22-d212ab05-3a751a1f] [null] Reconnecting session to cluster null in 0.000000s 2025-07-08T12:00:57.858799Z :DEBUG: [/Root] [/Root] [365ce65b-20bc7a22-d212ab05-3a751a1f] [null] Successfully connected. Initializing session 2025-07-08T12:00:57.859033Z node 3 :PQ_READ_PROXY DEBUG: new grpc connection 2025-07-08T12:00:57.859040Z node 3 :PQ_READ_PROXY DEBUG: new session created cookie 2 2025-07-08T12:00:57.859183Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session grpc read done: success# 1, data# { init_request { topics_read_settings { topic: "/Root/account1/write_topic" } read_only_original: true consumer: "consumer_aba" read_params { max_read_size: 104857600 } } } 2025-07-08T12:00:57.859213Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_9429121350532347346_v1 read init: from# ipv6:[::1]:58530, request# { init_request { topics_read_settings { topic: "/Root/account1/write_topic" } read_only_original: true consumer: "consumer_aba" read_params { max_read_size: 104857600 } } } 2025-07-08T12:00:57.859255Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_9429121350532347346_v1 auth for : consumer_aba 2025-07-08T12:00:57.859449Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_9429121350532347346_v1 Handle describe topics response 2025-07-08T12:00:57.859464Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_9429121350532347346_v1 auth is DEAD 2025-07-08T12:00:57.859477Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_9429121350532347346_v1 auth ok: topics# 1, initDone# 0 2025-07-08T12:00:57.859702Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_9429121350532347346_v1 register session: topic# /Root/account1/write_topic 2025-07-08T12:00:57.859917Z :INFO: [/Root] [/Root] [365ce65b-20bc7a22-d212ab05-3a751a1f] [null] Server session id: consumer_aba_3_2_9429121350532347346_v1 2025-07-08T12:00:57.859969Z :DEBUG: [/Root] [/Root] [365ce65b-20bc7a22-d212ab05-3a751a1f] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-07-08T12:00:57.860045Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_9429121350532347346_v1 grpc read done: success# 1, data# { read { } } 2025-07-08T12:00:57.860074Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_9429121350532347346_v1 got read request: guid# 62631b3-608c78e4-8fc71d51-41cef686 2025-07-08T12:00:57.860226Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] pipe [3:7524679868898789715:2336] connected; active server actors: 1 2025-07-08T12:00:57.860294Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: 
[72075186224037894][write_topic] consumer "consumer_aba" register session for pipe [3:7524679868898789715:2336] session consumer_aba_3_2_9429121350532347346_v1 2025-07-08T12:00:57.860306Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba register readable partition 0 2025-07-08T12:00:57.860317Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba family created family=1 (Status=Free, Partitions=[0]) 2025-07-08T12:00:57.860324Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] consumer consumer_aba register reading session ReadingSession "consumer_aba_3_2_9429121350532347346_v1" (Sender=[3:7524679868898789712:2336], Pipe=[3:7524679868898789715:2336], Partitions=[], ActiveFamilyCount=0) 2025-07-08T12:00:57.860328Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba rebalancing was scheduled 2025-07-08T12:00:57.860339Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba balancing. Sessions=1, Families=1, UnradableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2025-07-08T12:00:57.860345Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "consumer_aba_3_2_9429121350532347346_v1" (Sender=[3:7524679868898789712:2336], Pipe=[3:7524679868898789715:2336], Partitions=[], ActiveFamilyCount=0) 2025-07-08T12:00:57.860357Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] consumer consumer_aba family 1 status Active partitions [0] session "consumer_aba_3_2_9429121350532347346_v1" sender [3:7524679868898789712:2336] lock partition 0 for ReadingSession "consumer_aba_3_2_9429121350532347346_v1" (Sender=[3:7524679868898789712:2336], Pipe=[3:7524679868898789715:2336], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2025-07-08T12:00:57.860364Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba start rebalancing. 
familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2025-07-08T12:00:57.860368Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba balancing duration: 0.000027s 2025-07-08T12:00:57.860477Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_9429121350532347346_v1 assign: record# { Partition: 0 TabletId: 72075186224037893 Topic: "write_topic" Generation: 1 Step: 1 Session: "consumer_aba_3_2_9429121350532347346_v1" ClientId: "consumer_aba" PipeClient { RawX1: 7524679868898789715 RawX2: 4503612512274720 } Path: "/Root/account1/write_topic" } 2025-07-08T12:00:57.860491Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_9429121350532347346_v1 INITING TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) 2025-07-08T12:00:57.860944Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_9429121350532347346_v1 TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037893 Generation: 1, pipe: [3:7524679868898789718:2339] 2025-07-08T12:00:57.861137Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: consumer_aba_3_2_9429121350532347346_v1:1 with generation 1 2025-07-08T12:00:57.867347Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_9429121350532347346_v1 TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 1 WriteTimestampMS: 1751976057743 CreateTimestampMS: 1751976057743 SizeLag: 165 WriteTimestampEstimateMS: 1751976057743 ClientHasAnyCommits: false } Cookie: 18446744073709551615 } 2025-07-08T12:00:57.867379Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_9429121350532347346_v1 INIT DONE TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) EndOffset 1 readOffset 0 committedOffset 0 2025-07-08T12:00:57.867412Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_9429121350532347346_v1 sending to client partition status Got new read session event: CreatePartitionStream { PartitionStreamId: 1 TopicPath: account1/write_topic Cluster: PartitionId: 0 CommittedOffset: 0 EndOffset: 1 } 2025-07-08T12:00:57.867897Z :INFO: [/Root] [/Root] [365ce65b-20bc7a22-d212ab05-3a751a1f] Closing read session. Close timeout: 0.000000s 2025-07-08T12:00:57.867911Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:account1/write_topic:0:1:0:0 2025-07-08T12:00:57.867921Z :INFO: [/Root] [/Root] [365ce65b-20bc7a22-d212ab05-3a751a1f] Counters: { Errors: 0 CurrentSessionLifetimeMs: 9 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-07-08T12:00:57.867941Z :NOTICE: [/Root] [/Root] [365ce65b-20bc7a22-d212ab05-3a751a1f] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-07-08T12:00:57.867949Z :DEBUG: [/Root] [/Root] [365ce65b-20bc7a22-d212ab05-3a751a1f] [null] Abort session to cluster 2025-07-08T12:00:57.868048Z :NOTICE: [/Root] [/Root] [365ce65b-20bc7a22-d212ab05-3a751a1f] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-07-08T12:00:57.873092Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_9429121350532347346_v1 grpc read done: success# 0, data# { } 2025-07-08T12:00:57.873108Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_9429121350532347346_v1 grpc read failed 2025-07-08T12:00:57.873115Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_9429121350532347346_v1 grpc closed 2025-07-08T12:00:57.873134Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_9429121350532347346_v1 is DEAD 2025-07-08T12:00:57.874020Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: consumer_aba_3_2_9429121350532347346_v1 2025-07-08T12:00:57.874129Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] pipe [3:7524679868898789715:2336] disconnected; active server actors: 1 2025-07-08T12:00:57.874138Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037894][write_topic] pipe [3:7524679868898789715:2336] client consumer_aba disconnected session consumer_aba_3_2_9429121350532347346_v1 >> TOlapReboots::CreateStandaloneTable [GOOD] >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD] |68.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/metadata/secret/ut/unittest >> Secret::Deactivated [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000f99/r3tmp/tmpva58ip/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3169, node 1 TClient is connected to server localhost:27098 2025-07-08T12:00:46.199104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:00:46.220840Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:46.221678Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:00:46.221691Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:00:46.221695Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:00:46.221778Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:00:46.256606Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:46.256641Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:46.268378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:46.467290Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 >> TOlapReboots::CreateMultipleStandaloneTables [GOOD] |68.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] |68.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::CreateStandaloneTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:127:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:132:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:137:2058] recipient: [1:111:2143] 2025-07-08T12:00:36.914726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:00:36.914743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:36.914748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:00:36.914753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:00:36.914758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:00:36.914762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:36.914770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:36.914786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:00:36.914856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:36.928605Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T12:00:36.928631Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:177:2058] recipient: [1:15:2062] 2025-07-08T12:00:36.933954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Complete 2025-07-08T12:00:36.933995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:36.934027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:00:36.935559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:36.935685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:36.935774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:36.935828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:00:36.937548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:36.937599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:00:36.937811Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:36.937821Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:36.937838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:36.937846Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:36.937851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:00:36.937887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-07-08T12:00:36.941791Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T12:00:36.959089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:36.959150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:36.959199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:00:36.959236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:36.959246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:36.959939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 
72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:36.959962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:00:36.960001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:36.960009Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:00:36.960014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:00:36.960019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:00:36.960426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:36.960437Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:36.960442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:00:36.960788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:36.960798Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:36.960803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:36.960810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:00:36.961389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:36.961774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:00:36.961809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:00:36.961980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:36.962002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:36.962009Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:36.962071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:00:36.962077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:36.962101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:36.962113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:00:36.962483Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:36.962491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:36.962525Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:36.962530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:00:36.962587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:36.962593Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:00:36.962604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:36.962608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:36.962612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:36.962615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:36.962619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:00:36.962624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TO ... 
blet strongly msg operationId: 1002:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1002 msg type: 269090816 2025-07-08T12:00:58.804216Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1002 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1002 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 1002 at step: 5000003 2025-07-08T12:00:58.804301Z node 68 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:58.804321Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1002 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 292057778285 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:58.804327Z node 68 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TPropose operationId# 1002:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000003 2025-07-08T12:00:58.804409Z node 68 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 128 -> 129 2025-07-08T12:00:58.804437Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:58.804448Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-07-08T12:00:58.804688Z node 68 :TX_COLUMNSHARD WARN: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=1002;fline=tables_manager.cpp:45;method=resolve_internal_path_id;ss_local=3;result=not_found; 2025-07-08T12:00:58.805701Z node 68 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=1002;fline=tx_controller.cpp:215;event=finished_tx;tx_id=1002; FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-07-08T12:00:58.806177Z node 68 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:58.806188Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:58.806235Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T12:00:58.806268Z node 68 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:58.806275Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [68:207:2209], at schemeshard: 72057594046678944, txId: 1002, path id: 1 2025-07-08T12:00:58.806281Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [68:207:2209], at schemeshard: 72057594046678944, txId: 1002, path id: 3 2025-07-08T12:00:58.806396Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-07-08T12:00:58.806405Z node 68 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TProposedWaitParts operationId# 1002:0 ProgressState at tablet: 72057594046678944 
2025-07-08T12:00:58.806414Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TCreateColumnTable TProposedWaitParts operationId# 1002:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-07-08T12:00:58.806591Z node 68 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2025-07-08T12:00:58.806605Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2025-07-08T12:00:58.806610Z node 68 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2025-07-08T12:00:58.806615Z node 68 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-07-08T12:00:58.806621Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T12:00:58.806849Z node 68 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1002 2025-07-08T12:00:58.806867Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1002 2025-07-08T12:00:58.806871Z node 68 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2025-07-08T12:00:58.806876Z node 68 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-07-08T12:00:58.806880Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-07-08T12:00:58.806894Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 0/1, is published: true 2025-07-08T12:00:58.807428Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1002:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-07-08T12:00:58.807714Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-07-08T12:00:58.808109Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-07-08T12:00:58.818870Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1002 2025-07-08T12:00:58.818890Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2025-07-08T12:00:58.818923Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1002 FAKE_COORDINATOR: Erasing txId 1002 2025-07-08T12:00:58.819412Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2025-07-08T12:00:58.819453Z node 68 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-07-08T12:00:58.819460Z node 68 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1002:0 ProgressState 2025-07-08T12:00:58.819474Z node 68 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2025-07-08T12:00:58.819478Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-07-08T12:00:58.819483Z node 68 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2025-07-08T12:00:58.819487Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-07-08T12:00:58.819492Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: true 2025-07-08T12:00:58.819505Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [68:366:2343] message: TxId: 1002 2025-07-08T12:00:58.819511Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-07-08T12:00:58.819517Z node 68 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2025-07-08T12:00:58.819521Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2025-07-08T12:00:58.819550Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-07-08T12:00:58.820563Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-07-08T12:00:58.820575Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [68:367:2344] TestWaitNotification: OK eventTxId 1002 2025-07-08T12:00:58.820685Z node 68 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:58.820745Z node 68 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable" took 70us result status StatusSuccess 2025-07-08T12:00:58.820903Z node 68 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD] Test command err: === Server->StartServer(false); 2025-07-08T12:00:55.252325Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679863505326121:2078];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:55.252339Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:00:55.273428Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679859921089138:2072];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:55.273450Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0016d7/r3tmp/tmpWZly2O/pdisk_1.dat 2025-07-08T12:00:55.291649Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T12:00:55.297401Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T12:00:55.317939Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:55.357375Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:55.357401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 7917, node 1 2025-07-08T12:00:55.366339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:55.393913Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:55.393933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:55.401136Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, 
broken or outdated, will use file: /home/runner/.ya/build/build_root/43nv/0016d7/r3tmp/yandexEXVa9C.tmp 2025-07-08T12:00:55.401146Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/43nv/0016d7/r3tmp/yandexEXVa9C.tmp 2025-07-08T12:00:55.406134Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-07-08T12:00:55.406618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:55.406885Z INFO: TTestServer started on Port 4169 GrpcPort 7917 2025-07-08T12:00:55.418654Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/43nv/0016d7/r3tmp/yandexEXVa9C.tmp 2025-07-08T12:00:55.418763Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4169 PQClient connected to localhost:7917 === TenantModeEnabled() = 1 === Init PQ - start server on port 7917 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-07-08T12:00:55.459100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-07-08T12:00:55.459144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.459198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-07-08T12:00:55.459240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-07-08T12:00:55.459246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.460046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-07-08T12:00:55.460072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-07-08T12:00:55.460104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.460109Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-07-08T12:00:55.460111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-07-08T12:00:55.460114Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-07-08T12:00:55.461069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.461078Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-07-08T12:00:55.461081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-07-08T12:00:55.461555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.461561Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.461564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-07-08T12:00:55.461568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-07-08T12:00:55.462212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:55.462634Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-07-08T12:00:55.462667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-07-08T12:00:55.463262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1751976055508, transactions count in step: 1, at schemeshard: 72057594046644480 2025-07-08T12:00:55.463286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1751976055508 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-07-08T12:00:55.463291Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-07-08T12:00:55.463337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-07-08T12:00:55.463353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-07-08T12:00:55.463380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-07-08T12:00:55.463387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-07-08T12:00:55.463844Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-07-08T12:00:55.463851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-07-08T12:00:55.463896Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-07-08T12:00:55.463899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7524679863505326659:2372], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-07-08T12:00:55.463904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.463907Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-07-08T12:00:55.463916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-07-08T12:00:55.463918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-07-08T12:00:55.463927Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-07-08T12:00:55.463929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-07-08T12:00:55.463931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-07-08T12:00:55.463935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-07-08T12:00:55.463937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 
2025-07-08T12:00:55.463939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-07-08T12:00:55.463947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-07-08T12:00:55.463950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2025-07-08T12:00:55.463952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710657, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-07-08T12:00:55.464396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: ... ==Assert streaming op1 ===Assert streaming op2 2025-07-08T12:00:57.401398Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-07-08T12:00:57.401407Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2025-07-08T12:00:57.401576Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-group-id" } 2025-07-08T12:00:57.401595Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-group-id" from ipv6:[::1]:37658 2025-07-08T12:00:57.401601Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:37658 proto=v1 topic=/Root/acc/topic1 durationSec=0 2025-07-08T12:00:57.401605Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-07-08T12:00:57.401928Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2025-07-08T12:00:57.401975Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-07-08T12:00:57.401976Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-07-08T12:00:57.401978Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-07-08T12:00:57.401990Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524679868538298985:2324] (SourceId=test-group-id, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-07-08T12:00:57.401995Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-07-08T12:00:57.402451Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 4, Generation: 1 2025-07-08T12:00:57.403138Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-group-id|2a6cd381-d96e9504-5512b4e6-cb47f0e3_0 2025-07-08T12:00:57.403565Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-group-id|2a6cd381-d96e9504-5512b4e6-cb47f0e3_0 grpc read done: success: 1 data: write_request[data omitted] 2025-07-08T12:00:57.403631Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-07-08T12:00:57.403960Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::IEventHandle 2025-07-08T12:00:57.405746Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::IEventHandle ===ModifyAcl BEFORE MODIFY PERMISSIONS 2025-07-08T12:00:57.410796Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\031\010\001\022\025\032\023test_user_0@builtin" } } TxId: 281474976720664 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:37660" , at schemeshard: 72057594046644480 2025-07-08T12:00:57.410964Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976720664:0, at schemeshard: 72057594046644480 2025-07-08T12:00:57.410995Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 6] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-07-08T12:00:57.410996Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 6] 2025-07-08T12:00:57.411030Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976720664:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-07-08T12:00:57.411035Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976720664:0, at schemeshard: 72057594046644480 2025-07-08T12:00:57.411051Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720664:0 progress is 1/1 2025-07-08T12:00:57.411054Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720664 ready parts: 1/1 2025-07-08T12:00:57.411058Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720664:0 progress is 1/1 2025-07-08T12:00:57.411060Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720664 ready parts: 1/1 2025-07-08T12:00:57.411068Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 3 2025-07-08T12:00:57.411080Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720664, ready parts: 1/1, is published: false 2025-07-08T12:00:57.411086Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 6], at schemeshard: 72057594046644480 
2025-07-08T12:00:57.411088Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720664 ready parts: 1/1 2025-07-08T12:00:57.411090Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720664:0 2025-07-08T12:00:57.411094Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976720664, publications: 1, subscribers: 0 2025-07-08T12:00:57.411096Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720664, [OwnerId: 72057594046644480, LocalPathId: 6], 4 2025-07-08T12:00:57.417313Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976720664, response: Status: StatusSuccess TxId: 281474976720664 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-07-08T12:00:57.417385Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976720664, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, remove access: -():test_user_0@builtin:- 2025-07-08T12:00:57.417440Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-07-08T12:00:57.417444Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976720664, path id: [OwnerId: 72057594046644480, LocalPathId: 6] 2025-07-08T12:00:57.417498Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-07-08T12:00:57.417502Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7524679864243330940:2371], at schemeshard: 72057594046644480, txId: 281474976720664, path id: 6 2025-07-08T12:00:57.417862Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976720664 2025-07-08T12:00:57.417872Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976720664 2025-07-08T12:00:57.417874Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976720664 2025-07-08T12:00:57.417878Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976720664, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 4 2025-07-08T12:00:57.417883Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 4 2025-07-08T12:00:57.417915Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976720664, subscribers: 0 ===Wait for session created with token with removed ACE to die2025-07-08T12:00:57.425210Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976720664 2025-07-08T12:00:57.693316Z node 3 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:00:57.700157Z node 4 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:00:58.057708Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: 
[3:7524679872833266333:2329], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T12:00:58.058183Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MjYxYjE4ZGQtYTQ3ODJhMGMtNmIyY2ZiNWUtNzA5ZTJmYmQ=, ActorId: [3:7524679872833266331:2328], ActorState: ExecuteState, TraceId: 01jzmyj766c3eqmh1xje2s8agc, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T12:00:58.058289Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T12:00:58.396232Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-07-08T12:00:58.397130Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-group-id|2a6cd381-d96e9504-5512b4e6-cb47f0e3_0 describe result for acl check 2025-07-08T12:00:58.397165Z node 3 :PQ_WRITE_PROXY INFO: session v1 error cookie: 2 reason: access to topic 'Topic /Root/acc/topic1 in database: /Root' denied for 'test_user_0@builtin' due to 'no WriteTopic rights', Marker# PQ1125 sessionId: test-group-id|2a6cd381-d96e9504-5512b4e6-cb47f0e3_0 2025-07-08T12:00:58.397368Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-group-id|2a6cd381-d96e9504-5512b4e6-cb47f0e3_0 is DEAD 2025-07-08T12:00:58.397454Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison status: UNAUTHORIZED issues { message: "access to topic \'Topic /Root/acc/topic1 in database: /Root\' denied for \'test_user_0@builtin\' due to \'no WriteTopic rights\', Marker# PQ1125" issue_code: 500018 severity: 1 } |68.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> TOlapReboots::CreateStore [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD] |68.6%| [TA] $(B)/ydb/services/metadata/initializer/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit+UseSink >> DataShardSnapshots::DelayedWriteReplyAfterSplit [GOOD] >> DataShardSnapshots::DelayedWriteReadableAfterSplitAndReboot |68.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] Test command err: === Server->StartServer(false); 2025-07-08T12:00:55.624233Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679861329151284:2148];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:55.624484Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:00:55.633183Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679863230200113:2246];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:55.656449Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T12:00:55.655375Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T12:00:55.656685Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0016d3/r3tmp/tmpQB2F8I/pdisk_1.dat 2025-07-08T12:00:55.693092Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10275, node 1 2025-07-08T12:00:55.720392Z INFO: TTestServer started on Port 32394 GrpcPort 10275 2025-07-08T12:00:55.717374Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/43nv/0016d3/r3tmp/yandex2bBYOf.tmp 2025-07-08T12:00:55.717385Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/43nv/0016d3/r3tmp/yandex2bBYOf.tmp 2025-07-08T12:00:55.722735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:55.722758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:55.724767Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32394 PQClient connected to localhost:10275 === TenantModeEnabled() = 1 === Init PQ - start server on port 10275 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-07-08T12:00:55.736555Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/43nv/0016d3/r3tmp/yandex2bBYOf.tmp 2025-07-08T12:00:55.736616Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:55.757027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:55.757050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:55.758256Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-07-08T12:00:55.758521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:55.765735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-07-08T12:00:55.765789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.765855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-07-08T12:00:55.765910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-07-08T12:00:55.765925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.766783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-07-08T12:00:55.766812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-07-08T12:00:55.766842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.766848Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 
2025-07-08T12:00:55.766851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2025-07-08T12:00:55.766852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 2 -> 3 waiting... 2025-07-08T12:00:55.767326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.767340Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2025-07-08T12:00:55.767343Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 3 -> 128 2025-07-08T12:00:55.767826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.767833Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.767836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet# 72057594046644480 2025-07-08T12:00:55.767839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2025-07-08T12:00:55.768271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:55.768355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2025-07-08T12:00:55.768358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2025-07-08T12:00:55.768360Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2025-07-08T12:00:55.768619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2025-07-08T12:00:55.768652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2025-07-08T12:00:55.769187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1751976055816, transactions count in step: 1, at schemeshard: 72057594046644480 2025-07-08T12:00:55.769207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1751976055816 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-07-08T12:00:55.769212Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-07-08T12:00:55.769265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 128 -> 240 2025-07-08T12:00:55.769269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-07-08T12:00:55.769292Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-07-08T12:00:55.769297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-07-08T12:00:55.769665Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-07-08T12:00:55.769671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976720657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-07-08T12:00:55.769698Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-07-08T12:00:55.769700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7524679861329151750:2383], at schemeshard: 72057594046644480, txId: 281474976720657, path id: 1 2025-07-08T12:00:55.769705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.769708Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976720657:0 ProgressState 2025-07-08T12:00:55.769716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2025-07-08T12:00:55.769717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-07-08T12:00:55.769719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2025-07-08T12:00:55.769720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-07-08T12:00:55.769722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 1/1, is published: false 2025-07-08T12:00:55.769724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-07-08T12:00:55.769726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720657:0 2025-07-08T12:00:55.769727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720657:0 2025-07-08T12:00:55.769734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 720575940 ... 
ipv6:[::1]:33628 proto=v1 topic=/Root/PQ/account/topic durationSec=0 2025-07-08T12:00:58.805542Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-07-08T12:00:58.805944Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: describe result for acl check 2025-07-08T12:00:58.805977Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-07-08T12:00:58.805979Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-07-08T12:00:58.805980Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-07-08T12:00:58.805991Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524679876420674662:2374] (SourceId=123, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-07-08T12:00:58.805995Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 4 sessionId: partition: 0 expectedGeneration: (NULL) 2025-07-08T12:00:58.806284Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037893, NodeId 3, Generation: 1 2025-07-08T12:00:58.806298Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] server connected, pipe [3:7524679876420674665:2374], now have 1 active actors on pipe 2025-07-08T12:00:58.806367Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-07-08T12:00:58.806376Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2025-07-08T12:00:58.806402Z node 3 :PERSQUEUE INFO: new Cookie 123|326daac4-17ef0ead-811df09b-fb02ee91_0 generated for partition 0 topic 'PQ/account/topic' owner 123 2025-07-08T12:00:58.806429Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-07-08T12:00:58.806445Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-07-08T12:00:58.806554Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-07-08T12:00:58.806558Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2025-07-08T12:00:58.806569Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-07-08T12:00:58.806585Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 4 partition: 0 MaxSeqNo: 2 sessionId: 123|326daac4-17ef0ead-811df09b-fb02ee91_0 2025-07-08T12:00:58.809112Z :INFO: [] MessageGroupId [123] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1751976058809 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-07-08T12:00:58.809143Z :INFO: [] MessageGroupId [123] SessionId [] Write session established. Init response: last_sequence_number: 2 session_id: "123|326daac4-17ef0ead-811df09b-fb02ee91_0" topic: "PQ/account/topic" 2025-07-08T12:00:58.809248Z :DEBUG: [] MessageGroupId [123] SessionId [123|326daac4-17ef0ead-811df09b-fb02ee91_0] Write 1 messages with Id from 1 to 1 2025-07-08T12:00:58.809265Z :DEBUG: [] MessageGroupId [123] SessionId [123|326daac4-17ef0ead-811df09b-fb02ee91_0] Write session: try to update token 2025-07-08T12:00:58.809271Z :DEBUG: [] MessageGroupId [123] SessionId [123|326daac4-17ef0ead-811df09b-fb02ee91_0] Send 1 message(s) (0 left), first sequence number is 3 2025-07-08T12:00:58.809329Z :INFO: [] MessageGroupId [123] SessionId [123|326daac4-17ef0ead-811df09b-fb02ee91_0] Write session: close. Timeout = 10000 ms 2025-07-08T12:00:58.813073Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: 123|326daac4-17ef0ead-811df09b-fb02ee91_0 grpc read done: success: 1 data: write_request[data omitted] 2025-07-08T12:00:58.813205Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-07-08T12:00:58.813729Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-07-08T12:00:58.813742Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2025-07-08T12:00:58.813777Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 1 2025-07-08T12:00:58.813796Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-07-08T12:00:58.813913Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-07-08T12:00:58.813917Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2025-07-08T12:00:58.813930Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message topic: PQ/account/topic partition: 0 SourceId: '\000123' SeqNo: 3 partNo : 0 messageNo: 1 size 372 offset: -1 2025-07-08T12:00:58.813953Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Send write quota request. Topic: "PQ/account/topic". Partition: 0. Amount: 376. Cookie: 3 2025-07-08T12:00:58.813966Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Got quota. Topic: "PQ/account/topic". 
Partition: 0: Cookie: 3 2025-07-08T12:00:58.814001Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Topic 'PQ/account/topic' partition 0 part blob processing sourceId '\000123' seqNo 3 partNo 0 2025-07-08T12:00:58.814045Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Topic 'PQ/account/topic' partition 0 part blob complete sourceId '\000123' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 443 count 1 nextOffset 3 batches 1 2025-07-08T12:00:58.814093Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Add new write blob: topic 'PQ/account/topic' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 431 WTime 1751976058813 2025-07-08T12:00:58.814113Z node 3 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-07-08T12:00:58.814120Z node 3 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 1 size 431 2025-07-08T12:00:58.820759Z node 3 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 2 count 1 size 431 actorID [3:7524679876420674349:2346] 2025-07-08T12:00:58.820793Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 376 WriteNewSizeFromSupportivePartitions# 0 2025-07-08T12:00:58.820802Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-07-08T12:00:58.820812Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Answering for message sourceid: '\000123', Topic: 'PQ/account/topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-07-08T12:00:58.820852Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-07-08T12:00:58.820870Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-07-08T12:00:58.821139Z node 3 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. 
Tablet '72075186224037893' partition 0 offset 2 partno 0 count 1 parts 0 size 431 2025-07-08T12:00:58.821224Z :DEBUG: [] MessageGroupId [123] SessionId [123|326daac4-17ef0ead-811df09b-fb02ee91_0] Write session got write response: sequence_numbers: 3 offsets: 2 already_written: false write_statistics { persist_duration_ms: 6 } 2025-07-08T12:00:58.821238Z :DEBUG: [] MessageGroupId [123] SessionId [123|326daac4-17ef0ead-811df09b-fb02ee91_0] Write session: acknoledged message 1 2025-07-08T12:00:58.912989Z :INFO: [] MessageGroupId [123] SessionId [123|326daac4-17ef0ead-811df09b-fb02ee91_0] Write session will now close 2025-07-08T12:00:58.913013Z :DEBUG: [] MessageGroupId [123] SessionId [123|326daac4-17ef0ead-811df09b-fb02ee91_0] Write session: aborting 2025-07-08T12:00:58.913198Z :INFO: [] MessageGroupId [123] SessionId [123|326daac4-17ef0ead-811df09b-fb02ee91_0] Write session: gracefully shut down, all writes complete 2025-07-08T12:00:58.913207Z :DEBUG: [] MessageGroupId [123] SessionId [123|326daac4-17ef0ead-811df09b-fb02ee91_0] Write session: destroy 2025-07-08T12:00:58.913660Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: 123|326daac4-17ef0ead-811df09b-fb02ee91_0 grpc read done: success: 0 data: 2025-07-08T12:00:58.913671Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: 123|326daac4-17ef0ead-811df09b-fb02ee91_0 grpc read failed 2025-07-08T12:00:58.913680Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: 123|326daac4-17ef0ead-811df09b-fb02ee91_0 grpc closed 2025-07-08T12:00:58.913682Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: 123|326daac4-17ef0ead-811df09b-fb02ee91_0 is DEAD 2025-07-08T12:00:58.913845Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-07-08T12:00:58.914170Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] server disconnected, pipe [3:7524679876420674665:2374] destroyed 2025-07-08T12:00:58.914183Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-07-08T12:00:59.008729Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7524679880715641974:2377], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T12:00:59.009149Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZmNjMTE5OTQtOWRkN2JhZWItMTU4NzlmYmMtOWE3YWQ0MGM=, ActorId: [3:7524679880715641972:2376], ActorState: ExecuteState, TraceId: 01jzmyj83x6vcjfyycg60vhpj2, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T12:00:59.009278Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } |68.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD] Test command err: 2025-07-08T12:00:55.162153Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001057/r3tmp/tmpSjijAW/pdisk_1.dat 2025-07-08T12:00:55.294160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.313606Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:55.346190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:55.346232Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:55.357515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:55.443225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.477210Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:613:2527] 2025-07-08T12:00:55.477322Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:55.486233Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:55.486291Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:55.486472Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T12:00:55.486482Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T12:00:55.486490Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T12:00:55.486557Z node 1 :TX_DATASHARD 
DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:55.486612Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:55.486625Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:640:2527] in generation 1 2025-07-08T12:00:55.486960Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:617:2529] 2025-07-08T12:00:55.486997Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:55.488287Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:55.488313Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:55.488458Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-07-08T12:00:55.488465Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-07-08T12:00:55.488472Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-07-08T12:00:55.488510Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:55.488528Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:55.488539Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:648:2529] in generation 1 2025-07-08T12:00:55.498911Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:55.503160Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T12:00:55.503266Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:55.503301Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:651:2548] 2025-07-08T12:00:55.503306Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:55.503311Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T12:00:55.503318Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:55.503443Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:55.503450Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-07-08T12:00:55.503462Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:55.503472Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:652:2549] 2025-07-08T12:00:55.503476Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-07-08T12:00:55.503479Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-07-08T12:00:55.503483Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-07-08T12:00:55.503604Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T12:00:55.503634Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T12:00:55.503656Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:55.503662Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 
planned 0 2025-07-08T12:00:55.503671Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:55.503677Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:55.503683Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-07-08T12:00:55.503690Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-07-08T12:00:55.503706Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:607:2523], serverId# [1:625:2533], sessionId# [0:0:0] 2025-07-08T12:00:55.503712Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-07-08T12:00:55.503716Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:55.503719Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-07-08T12:00:55.503723Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-07-08T12:00:55.503856Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:55.503946Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T12:00:55.503965Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T12:00:55.504044Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:608:2524], serverId# [1:638:2541], sessionId# [0:0:0] 2025-07-08T12:00:55.504079Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-07-08T12:00:55.504100Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-07-08T12:00:55.504113Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-07-08T12:00:55.504457Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:55.504473Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-07-08T12:00:55.517290Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:55.517342Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T12:00:55.517511Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-07-08T12:00:55.517520Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-07-08T12:00:55.689811Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:676:2567], serverId# [1:678:2569], sessionId# [0:0:0] 2025-07-08T12:00:55.689861Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:677:2568], serverId# [1:680:2571], sessionId# [0:0:0] 2025-07-08T12:00:55.690655Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 
TabletID: 72075186224037889 } 2025-07-08T12:00:55.690676Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-07-08T12:00:55.690800Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-07-08T12:00:55.690810Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-07-08T12:00:55.690822Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-07-08T12:00:55.690901Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-07-08T12:00:55.690935Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-07-08T12:00:55.690975Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-07-08T12:00:55.690989Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-07-08T12:00:55.691385Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-07-08T12:00:55.691472Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:55.691732Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-07-08T12:00:55.691741Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:55.691760Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2025-07-08T12:00:55.691764Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-07-08T12:00:55.691997Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:55.692006Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-07-08T12:00:55.692013Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-07-08T12:00:55.692055Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-07-08T12:00:55.692077Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-07-08T12:00:55.692217Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 720751862 ... 
ARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:59.432206Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-07-08T12:00:59.432219Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-07-08T12:00:59.432225Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037889 2025-07-08T12:00:59.432243Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:373:2366], exec latency: 0 ms, propose latency: 0 ms 2025-07-08T12:00:59.432250Z node 4 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-07-08T12:00:59.432259Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-07-08T12:00:59.432460Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-07-08T12:00:59.432471Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:59.432527Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-07-08T12:00:59.432531Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:59.432768Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:59.432777Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:59.432782Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-07-08T12:00:59.432794Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:373:2366], exec latency: 0 ms, propose latency: 0 ms 2025-07-08T12:00:59.432800Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-07-08T12:00:59.432813Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:59.432823Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037890 time 0 2025-07-08T12:00:59.432828Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-07-08T12:00:59.432926Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 1000} 2025-07-08T12:00:59.432934Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-07-08T12:00:59.433192Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:59.433210Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-07-08T12:00:59.433219Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-07-08T12:00:59.433346Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-07-08T12:00:59.433353Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 
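
A minimal parsing sketch, assuming only the record shape visible in the trace above ("<ISO timestamp> node <N> :<COMPONENT> <LEVEL>: <message>"); the regex and field names are illustrative conveniences for reading captured output offline, not part of the YDB tooling:

```python
# Assumption: every timestamped record in the captured stderr follows the shape
# "<ISO timestamp> node <N> :<COMPONENT> <LEVEL>: <message>". Untimestamped lines
# (e.g. "FAKE_COORDINATOR: ...") are simply skipped by this parser.
import re
from collections import Counter

RECORD_RE = re.compile(
    r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z) "
    r"node (?P<node>\d+) "
    r":(?P<component>\w+) (?P<level>TRACE|DEBUG|INFO|NOTICE|WARN|ERROR): "
    r"(?P<message>.*?)(?=\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z node |\Z)",
    re.S,
)

def parse_records(raw: str):
    """Yield dicts with ts, node, component, level, message for each record."""
    for m in RECORD_RE.finditer(raw):
        rec = m.groupdict()
        rec["message"] = rec["message"].strip()
        yield rec

if __name__ == "__main__":
    # "test_stderr.txt" is a hypothetical file holding a captured chunk of output.
    raw = open("test_stderr.txt", encoding="utf-8").read()
    counts = Counter((r["component"], r["level"]) for r in parse_records(raw))
    for (component, level), n in counts.most_common():
        print(f"{component:>20} {level:<6} {n}")
```
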
2025-07-08T12:00:59.433357Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037890 2025-07-08T12:00:59.433367Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037890 at tablet 72075186224037890 send result to client [4:373:2366], exec latency: 0 ms, propose latency: 0 ms 2025-07-08T12:00:59.433374Z node 4 :TX_DATASHARD INFO: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-07-08T12:00:59.433384Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-07-08T12:00:59.434065Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-07-08T12:00:59.434079Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-07-08T12:00:59.434111Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-07-08T12:00:59.434196Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-07-08T12:00:59.434224Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 1000 2025-07-08T12:00:59.434271Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-07-08T12:00:59.434276Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-07-08T12:00:59.434445Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2025-07-08T12:00:59.434452Z node 4 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-07-08T12:00:59.501886Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715658. Ctx: { TraceId: 01jzmyj8hb7ewpnt2075htstjc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=N2Y4OTFhOGItZThhM2M4ZGYtZTFlNzlkNDAtZjkwYTRmMzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:59.502800Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:874:2690], serverId# [4:875:2691], sessionId# [0:0:0] 2025-07-08T12:00:59.502961Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Acquired lock# 281474976715658, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-07-08T12:00:59.503509Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715659. Ctx: { TraceId: 01jzmyj8hb7ewpnt2075htstjc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=N2Y4OTFhOGItZThhM2M4ZGYtZTFlNzlkNDAtZjkwYTRmMzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:00:59.504110Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jzmyj8hb7ewpnt2075htstjc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=N2Y4OTFhOGItZThhM2M4ZGYtZTFlNzlkNDAtZjkwYTRmMzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T12:00:59.504277Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-07-08T12:00:59.504605Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1751976059504568 Step: 1001 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-07-08T12:00:59.504637Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1751976059504568 Step: 1001 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-07-08T12:00:59.517381Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-07-08T12:00:59.517430Z node 4 :TX_DATASHARD DEBUG: Waiting for PlanStep# 1001 from mediator time cast 2025-07-08T12:00:59.517461Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-07-08T12:00:59.517473Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-07-08T12:00:59.540423Z node 4 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 1001 at tablet 72075186224037889 2025-07-08T12:00:59.540477Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-07-08T12:00:59.553247Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jzmyj8mndkqw9kgbty8dd4cw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YWQ3Y2VjYWMtYWU2MTYxYi1jZWUyNjY5Ni02N2U1ZTYxZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T12:00:59.553437Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-07-08T12:00:59.553811Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 3 Group: 1751976059553776 Step: 1001 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-07-08T12:00:59.553846Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 4 Group: 1751976059553776 Step: 1001 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-07-08T12:00:59.553857Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 5 Group: 1751976059553776 Step: 1001 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-07-08T12:00:59.553867Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 6 Group: 1751976059553776 Step: 1001 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 24b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-07-08T12:00:59.564325Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-07-08T12:00:59.564401Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 24 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-07-08T12:00:59.564413Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-07-08T12:00:59.565653Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:926:2732], serverId# [4:927:2733], sessionId# [0:0:0] 2025-07-08T12:00:59.566815Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:928:2734], serverId# [4:929:2735], sessionId# [0:0:0] >> DataShardSnapshots::PostMergeNotCompactedTooEarly [GOOD] >> DataShardSnapshots::PipelineAndMediatorRestoreRace ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::CreateMultipleStandaloneTables [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 
72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:127:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:132:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:137:2058] recipient: [1:111:2143] 2025-07-08T11:59:55.401598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:59:55.401617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:55.401621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:59:55.401626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:59:55.401631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:59:55.401634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:59:55.401641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:55.401657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:59:55.401726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:55.413748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T11:59:55.413773Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:177:2058] recipient: [1:15:2062] 2025-07-08T11:59:55.416533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:55.416566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:59:55.416605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:59:55.417929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:59:55.418051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:59:55.418151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:55.418201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:59:55.418605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:55.418648Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:59:55.418840Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:55.418847Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:55.418864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:59:55.418873Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:55.418879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:59:55.418910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-07-08T11:59:55.420051Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T11:59:55.437279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:55.437338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.437394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:59:55.437431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:59:55.437441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.438068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:55.438097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:59:55.438141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.438150Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:59:55.438155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:59:55.438159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:59:55.438515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-07-08T11:59:55.438525Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:55.438529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:59:55.438800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.438809Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.438814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:55.438820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:59:55.439311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:59:55.439661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:59:55.439704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:59:55.439880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:55.439905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:55.439912Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:55.439999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:59:55.440006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:55.440033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:59:55.440046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:59:55.440398Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:55.440405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:55.440448Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2025-07-08T11:59:55.440452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:59:55.440520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.440526Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:59:55.440537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:55.440541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:55.440545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:55.440548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:55.440552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:59:55.440556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TO ... emeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:58.952564Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [120:208:2210], at schemeshard: 72057594046678944, txId: 1002, path id: 1 2025-07-08T12:00:58.952569Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [120:208:2210], at schemeshard: 72057594046678944, txId: 1002, path id: 4 2025-07-08T12:00:58.952590Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-07-08T12:00:58.952594Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [120:426:2392] 2025-07-08T12:00:58.952857Z node 120 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1002 2025-07-08T12:00:58.952871Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1002 2025-07-08T12:00:58.952876Z node 120 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2025-07-08T12:00:58.952881Z node 120 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-07-08T12:00:58.952887Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-07-08T12:00:58.952991Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1002:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 275382275 2025-07-08T12:00:58.953086Z node 120 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1002 2025-07-08T12:00:58.953095Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 1002 2025-07-08T12:00:58.953098Z node 120 
:FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2025-07-08T12:00:58.953102Z node 120 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2025-07-08T12:00:58.953105Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-07-08T12:00:58.953116Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 0/1, is published: true 2025-07-08T12:00:58.953652Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-07-08T12:00:58.953705Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 TestWaitNotification: OK eventTxId 1003 2025-07-08T12:00:58.965640Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409547 TxId: 1002 2025-07-08T12:00:58.965659Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409547, partId: 0 2025-07-08T12:00:58.965675Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409547 TxId: 1002 FAKE_COORDINATOR: Erasing txId 1002 2025-07-08T12:00:58.966008Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2025-07-08T12:00:58.966033Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-07-08T12:00:58.966038Z node 120 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1002:0 ProgressState 2025-07-08T12:00:58.966050Z node 120 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2025-07-08T12:00:58.966053Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-07-08T12:00:58.966056Z node 120 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2025-07-08T12:00:58.966058Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-07-08T12:00:58.966061Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: true 2025-07-08T12:00:58.966071Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [120:425:2391] message: TxId: 1002 2025-07-08T12:00:58.966076Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-07-08T12:00:58.966080Z node 120 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2025-07-08T12:00:58.966083Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2025-07-08T12:00:58.966116Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-07-08T12:00:58.966401Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-07-08T12:00:58.966408Z node 120 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [120:426:2392] TestWaitNotification: OK eventTxId 1002 
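
A small sketch for following the schemeshard operation state machine in dumps like the one above; it assumes only the "Change state for txid <txId>:<part> <from> -> <to>" message shape and is not a YDB API:

```python
# Collect the ordered state transitions per operation part so a progression such
# as 2 -> 3 -> 128 -> 240 can be read off directly instead of scanned by eye.
import re
from collections import defaultdict

TRANSITION_RE = re.compile(
    r"Change state for txid (?P<tx>\d+:\d+) (?P<src>\d+) -> (?P<dst>\d+)"
)

def state_chains(raw: str):
    """Map 'txId:part' -> ordered list of states, e.g. ['2', '3', '128', '240']."""
    chains = defaultdict(list)
    for m in TRANSITION_RE.finditer(raw):
        chain = chains[m["tx"]]
        if not chain:
            chain.append(m["src"])
        chain.append(m["dst"])
    return dict(chains)

# For the trace above, state_chains(raw).get("1:0") should give ['2', '3', '128', '240'].
```
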
2025-07-08T12:00:58.966488Z node 120 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:58.966537Z node 120 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable1" took 57us result status StatusSuccess 2025-07-08T12:00:58.966639Z node 120 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ColumnTable1" PathDescription { Self { Name: "ColumnTable1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable1" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:58.966725Z node 120 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:58.966740Z node 120 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable2" took 16us result status StatusSuccess 2025-07-08T12:00:58.966773Z node 120 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess 
Path: "/MyRoot/ColumnTable2" PathDescription { Self { Name: "ColumnTable2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable2" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409547 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::CreateStore [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:127:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:132:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:137:2058] recipient: [1:111:2143] 2025-07-08T12:00:36.955795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue 
configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:00:36.955818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:36.955822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:00:36.955827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:00:36.955832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:00:36.955836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:36.955844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:36.955861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:00:36.955940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:36.968132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T12:00:36.968157Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:177:2058] recipient: [1:15:2062] 2025-07-08T12:00:36.971359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:36.971401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:36.971436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:00:36.973041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:36.973183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:36.973280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:36.973324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:00:36.973749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:36.973783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:00:36.973937Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:36.973943Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:36.973955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:36.973959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:36.973963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 
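
The "TTxDescribeScheme DoComplete, result: ..." records above dump the path description as flattened text-format protobuf. A rough sketch for lifting individual scalar fields out of such a dump without the .proto definitions (a triage convenience and an assumed field syntax, not the supported way to consume these results):

```python
# Pull the first occurrence of a scalar field (quoted string or bare token) from
# a text-format protobuf dump such as the ColumnTable descriptions shown above.
import re

def scalar_field(dump: str, name: str):
    """Return the first value of a scalar field such as PathId or CreateStep."""
    m = re.search(rf"\b{re.escape(name)}: (\"[^\"]*\"|\S+)", dump)
    return m.group(1).strip('"') if m else None

# e.g. scalar_field(result_text, "CreateStep") -> "5000003" and
#      scalar_field(result_text, "Status") -> "StatusSuccess" for the dump above.
```
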
2025-07-08T12:00:36.973987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-07-08T12:00:36.974901Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T12:00:36.987014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:36.987066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:36.987112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:00:36.987141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:36.987149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:36.987748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:36.987778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:00:36.987821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:36.987832Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:00:36.987837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:00:36.987842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:00:36.988251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:36.988261Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:36.988266Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:00:36.988593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:36.988603Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:36.988608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:36.988615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:00:36.989031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:36.989397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:00:36.989436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:00:36.989625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:36.989649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:36.989656Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:36.989728Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:00:36.989735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:36.989763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:36.989773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:00:36.990137Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:36.990144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:36.990187Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:36.990192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:00:36.990263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:36.990269Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:00:36.990279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:36.990284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:36.990288Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 
1/1 2025-07-08T12:00:36.990291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:36.990295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:00:36.990300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TO ... Operation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 1002 MinStep: 57 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:59.582823Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1002:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1002 msg type: 269090816 2025-07-08T12:00:59.582862Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1002, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1002 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1002 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 1002 at step: 5000003 2025-07-08T12:00:59.582953Z node 68 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:59.582973Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1002 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 292057778285 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:59.582980Z node 68 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TPropose operationId# 1002:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000003 2025-07-08T12:00:59.583049Z node 68 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 128 -> 129 2025-07-08T12:00:59.583091Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:00:59.583103Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-07-08T12:00:59.583346Z node 68 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=1002;fline=tx_controller.cpp:215;event=finished_tx;tx_id=1002; FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-07-08T12:00:59.583661Z node 68 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:59.583669Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:59.583709Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1002, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T12:00:59.583738Z node 68 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:59.583743Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [68:207:2209], at schemeshard: 72057594046678944, txId: 1002, path id: 1 
2025-07-08T12:00:59.583747Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [68:207:2209], at schemeshard: 72057594046678944, txId: 1002, path id: 3 2025-07-08T12:00:59.583839Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-07-08T12:00:59.583845Z node 68 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TProposedWaitParts operationId# 1002:0 ProgressState at tablet: 72057594046678944 2025-07-08T12:00:59.583853Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TCreateOlapStore TProposedWaitParts operationId# 1002:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-07-08T12:00:59.583986Z node 68 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2025-07-08T12:00:59.583997Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2025-07-08T12:00:59.584001Z node 68 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2025-07-08T12:00:59.584009Z node 68 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-07-08T12:00:59.584014Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T12:00:59.584204Z node 68 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2025-07-08T12:00:59.584217Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2025-07-08T12:00:59.584220Z node 68 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2025-07-08T12:00:59.584224Z node 68 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-07-08T12:00:59.584228Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-07-08T12:00:59.584238Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 0/1, is published: true 2025-07-08T12:00:59.584562Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1002:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-07-08T12:00:59.584799Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-07-08T12:00:59.585056Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-07-08T12:00:59.603726Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1002 2025-07-08T12:00:59.603752Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2025-07-08T12:00:59.603779Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1002 FAKE_COORDINATOR: Erasing txId 1002 2025-07-08T12:00:59.604444Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2025-07-08T12:00:59.604485Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-07-08T12:00:59.604492Z node 68 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1002:0 ProgressState 2025-07-08T12:00:59.604510Z node 68 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2025-07-08T12:00:59.604515Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-07-08T12:00:59.604520Z node 68 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2025-07-08T12:00:59.604523Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-07-08T12:00:59.604528Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: true 2025-07-08T12:00:59.604542Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [68:366:2343] message: TxId: 1002 2025-07-08T12:00:59.604550Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-07-08T12:00:59.604555Z node 68 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2025-07-08T12:00:59.604562Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2025-07-08T12:00:59.604592Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-07-08T12:00:59.605096Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-07-08T12:00:59.605109Z node 68 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [68:367:2344] TestWaitNotification: OK eventTxId 1002 2025-07-08T12:00:59.605224Z node 68 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:00:59.605287Z node 68 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore" took 73us result status StatusSuccess 2025-07-08T12:00:59.605447Z node 68 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore" PathDescription { Self { Name: "OlapStore" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnStoreVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 
TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnStoreDescription { Name: "OlapStore" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 [GOOD] |68.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> BasicUsage::WriteSessionWriteInHandlers [GOOD] |68.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> BasicUsage::FallbackToSingleDb [GOOD] >> BasicUsage::FallbackToSingleDbAfterBadRequest |68.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::WriteSessionWriteInHandlers [GOOD] Test command err: 2025-07-08T12:00:53.443807Z :WriteSessionWriteInHandlers INFO: Random seed for debugging is 1751976053443796 2025-07-08T12:00:53.644042Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679851590147550:2077];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:53.644063Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:00:53.644026Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679852005543277:2235];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:53.644113Z node 1 
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001f2a/r3tmp/tmpYgcPKR/pdisk_1.dat 2025-07-08T12:00:53.678294Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T12:00:53.681323Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T12:00:53.703379Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12613, node 1 2025-07-08T12:00:53.719838Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/43nv/001f2a/r3tmp/yandexonRl6S.tmp 2025-07-08T12:00:53.719853Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/43nv/001f2a/r3tmp/yandexonRl6S.tmp 2025-07-08T12:00:53.719908Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/43nv/001f2a/r3tmp/yandexonRl6S.tmp 2025-07-08T12:00:53.719948Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:00:53.724519Z INFO: TTestServer started on Port 9768 GrpcPort 12613 TClient is connected to server localhost:9768 PQClient connected to localhost:12613 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting...2025-07-08T12:00:53.749545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:53.777791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:53.777817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:53.778309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:53.778322Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:53.778881Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-07-08T12:00:53.779187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:53.780455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 
2025-07-08T12:00:54.010295Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7524679851590147762:2263], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T12:00:54.010400Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzA2YzBhMzEtMWM5MjRkYWMtMjY1ZTMzMy1hNGVhYjM5Yg==, ActorId: [2:7524679851590147760:2262], ActorState: ExecuteState, TraceId: 01jzmyj36p1efcm99ba92z6cya, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T12:00:54.010816Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T12:00:54.010839Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7524679856300511264:2292], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T12:00:54.011221Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzdmNzNjMDctZmUwMjFmYzEtZmNkZTYyNzUtYzVhODU5YjA=, ActorId: [1:7524679852005543965:2290], ActorState: ExecuteState, TraceId: 01jzmyj37n57a81t116mtfvqzx, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T12:00:54.011338Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T12:00:54.015899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-07-08T12:00:54.044094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T12:00:54.128338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:12613", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-07-08T12:00:54.177597Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jzmyj3cabrybqk51e6qnpjv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzUyODg3NDItODNmYjFmMGMtYTRhYmMxNjUtNGYwNGJlMTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7524679856300511684:2902] 2025-07-08T12:00:54.641002Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:00:54.646009Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:00:58.649412Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7524679851590147550:2077];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:58.649445Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-07-08T12:00:58.649489Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7524679852005543277:2235];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:58.649512Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-07-08T12:00:59.269901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:12613 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-07-08T12:00:59.409845Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:12613 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 } } } CallPersQueueGRPC response: Status: 129 ProxyErrorCode: 53 SchemeStatus: 1 FlatTxId { TxId: 281474976710677 SchemeShardTabletId: 72057594046644480 PathId: 9 } ErrorCode: OK AddTopic: rt3.dc1--test-topic ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = test-topic, dc = dc1 2025-07-08T12:00:59.437453Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-07-08T12:00:59.441818Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7524679877775348818:3310] connected; active server actors: 1 2025-07-08T12:00:59.442258Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2025- ... 
ion: 0 SourceId: '\0src_id' SeqNo: 1 partNo : 0 messageNo: 1 size 115 offset: -1 2025-07-08T12:01:00.644211Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 1 partNo 0 2025-07-08T12:01:00.644265Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0src_id' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 189 count 1 nextOffset 1 batches 1 2025-07-08T12:01:00.644318Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000000_00000000000000000000_00000_0000000001_00000| size 177 WTime 1751976060644 2025-07-08T12:01:00.644352Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-07-08T12:01:00.644366Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 0 partNo 0 count 1 size 177 2025-07-08T12:01:00.645301Z node 1 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 0 count 1 size 177 actorID [1:7524679877775348846:2428] 2025-07-08T12:01:00.645340Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 122 WriteNewSizeFromSupportivePartitions# 0 2025-07-08T12:01:00.645350Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-07-08T12:01:00.645377Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-07-08T12:01:00.645382Z node 1 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 0 partno 0 count 1 parts 0 size 177 2025-07-08T12:01:00.645408Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-07-08T12:01:00.645416Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-07-08T12:01:00.645432Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-07-08T12:01:00.645450Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--test-topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2025-07-08T12:01:00.645450Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-07-08T12:01:00.645455Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1 2025-07-08T12:01:00.645461Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
2025-07-08T12:01:00.645465Z node 1 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-07-08T12:01:00.645482Z node 1 :PERSQUEUE DEBUG: Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp done, result 1751976060644 queuesize 0 startOffset 0 2025-07-08T12:01:00.645671Z :DEBUG: [/Root] TraceId [] SessionId [src_id|d03ffd62-cd986c52-4c406652-81eff072_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2025-07-08T12:01:00.645859Z :DEBUG: [/Root] TraceId [] SessionId [src_id|d03ffd62-cd986c52-4c406652-81eff072_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 1 written { } } write_statistics { persisting_time { } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2025-07-08T12:01:00.645871Z :DEBUG: [/Root] TraceId [] SessionId [src_id|d03ffd62-cd986c52-4c406652-81eff072_0] MessageGroupId [src_id] OnAck: seqNo=1, txId=? 2025-07-08T12:01:00.645878Z :DEBUG: [/Root] TraceId [] SessionId [src_id|d03ffd62-cd986c52-4c406652-81eff072_0] MessageGroupId [src_id] Write session: acknoledged message 1 === Inside AcksHandler 2025-07-08T12:01:00.645953Z :DEBUG: [/Root] TraceId [] SessionId [src_id|d03ffd62-cd986c52-4c406652-81eff072_0] MessageGroupId [src_id] Write 1 messages with Id from 2 to 2 === Inside ReadyToAcceptHandler 2025-07-08T12:01:00.646082Z :DEBUG: [/Root] TraceId [] SessionId [src_id|d03ffd62-cd986c52-4c406652-81eff072_0] MessageGroupId [src_id] Write session: try to update token 2025-07-08T12:01:00.646097Z :DEBUG: [/Root] TraceId [] SessionId [src_id|d03ffd62-cd986c52-4c406652-81eff072_0] MessageGroupId [src_id] Send 1 message(s) (0 left), first sequence number is 2 2025-07-08T12:01:00.646277Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: src_id|d03ffd62-cd986c52-4c406652-81eff072_0 grpc read done: success: 1 data: write_request[data omitted] 2025-07-08T12:01:00.646347Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-07-08T12:01:00.646414Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-07-08T12:01:00.646423Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-07-08T12:01:00.646444Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 2 requestId: cookie: 2 2025-07-08T12:01:00.646452Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-07-08T12:01:00.646502Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-07-08T12:01:00.646508Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-07-08T12:01:00.646522Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message topic: rt3.dc1--test-topic partition: 0 SourceId: '\0src_id' SeqNo: 2 partNo : 0 messageNo: 3 size 107 offset: -1 2025-07-08T12:01:00.646565Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 2 partNo 0 2025-07-08T12:01:00.646597Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0src_id' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 1 PartNo 0 PackedSize 
181 count 1 nextOffset 2 batches 1 2025-07-08T12:01:00.646659Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 1,1 HeadOffset 0 endOffset 1 curOffset 2 d0000000000_00000000000000000001_00000_0000000001_00000| size 169 WTime 1751976060646 2025-07-08T12:01:00.646692Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-07-08T12:01:00.646704Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 1 partNo 0 count 1 size 169 2025-07-08T12:01:00.647480Z node 1 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 1 count 1 size 169 actorID [1:7524679877775348846:2428] 2025-07-08T12:01:00.647547Z node 1 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 1 partno 0 count 1 parts 0 size 169 === AcksHandler has written a message, closing the session 2025-07-08T12:01:00.649055Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 114 WriteNewSizeFromSupportivePartitions# 0 2025-07-08T12:01:00.649071Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-07-08T12:01:00.649085Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2025-07-08T12:01:00.649123Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 3 requestId: cookie: 2 2025-07-08T12:01:00.649139Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-07-08T12:01:00.649418Z :DEBUG: [/Root] TraceId [] SessionId [src_id|d03ffd62-cd986c52-4c406652-81eff072_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2025-07-08T12:01:00.649475Z :DEBUG: [/Root] TraceId [] SessionId [src_id|d03ffd62-cd986c52-4c406652-81eff072_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 2 written { offset: 1 } } write_statistics { persisting_time { nanos: 2000000 } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2025-07-08T12:01:00.649482Z :DEBUG: [/Root] TraceId [] SessionId [src_id|d03ffd62-cd986c52-4c406652-81eff072_0] MessageGroupId [src_id] OnAck: seqNo=2, txId=? 2025-07-08T12:01:00.649486Z :DEBUG: [/Root] TraceId [] SessionId [src_id|d03ffd62-cd986c52-4c406652-81eff072_0] MessageGroupId [src_id] Write session: acknoledged message 2 === Inside AcksHandler === Inside SessionClosedHandler 2025-07-08T12:01:00.650075Z :DEBUG: [/Root] TraceId [] SessionId [src_id|d03ffd62-cd986c52-4c406652-81eff072_0] MessageGroupId [src_id] Write 1 messages with Id from 3 to 3 === SessionClosedHandler has 'written' a message 2025-07-08T12:01:00.650122Z :INFO: [/Root] TraceId [] SessionId [src_id|d03ffd62-cd986c52-4c406652-81eff072_0] MessageGroupId [src_id] Write session: close. 
Timeout 0.000000s 2025-07-08T12:01:00.650126Z :INFO: [/Root] TraceId [] SessionId [src_id|d03ffd62-cd986c52-4c406652-81eff072_0] MessageGroupId [src_id] Write session will now close 2025-07-08T12:01:00.650132Z :DEBUG: [/Root] TraceId [] SessionId [src_id|d03ffd62-cd986c52-4c406652-81eff072_0] MessageGroupId [src_id] Write session: aborting 2025-07-08T12:01:00.650288Z :WARNING: [/Root] TraceId [] SessionId [src_id|d03ffd62-cd986c52-4c406652-81eff072_0] MessageGroupId [src_id] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2025-07-08T12:01:00.650332Z :DEBUG: [/Root] TraceId [] SessionId [src_id|d03ffd62-cd986c52-4c406652-81eff072_0] MessageGroupId [src_id] Write session: destroy 2025-07-08T12:01:00.650590Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: src_id|d03ffd62-cd986c52-4c406652-81eff072_0 grpc read done: success: 0 data: 2025-07-08T12:01:00.650599Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|d03ffd62-cd986c52-4c406652-81eff072_0 grpc read failed 2025-07-08T12:01:00.650606Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|d03ffd62-cd986c52-4c406652-81eff072_0 grpc closed 2025-07-08T12:01:00.650610Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|d03ffd62-cd986c52-4c406652-81eff072_0 is DEAD 2025-07-08T12:01:00.650792Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-07-08T12:01:00.650981Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7524679882070316497:2475] destroyed 2025-07-08T12:01:00.650993Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. >> BasicUsage::WriteSessionCloseWaitsForWrites [GOOD] >> BasicUsage::WriteSessionCloseIgnoresWrites >> BasicUsage::BasicWriteSession [GOOD] >> BasicUsage::CloseWriteSessionImmediately |68.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> BasicUsage::GetAllStartPartitionSessions [GOOD] >> BasicUsage::PreferredDatabaseNoFallback |68.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 [GOOD] Test command err: 2025-07-08T11:59:08.001996Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:98;event=initialize_shard;step=OnActivateExecutor; 2025-07-08T11:59:08.004913Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:125:2157];fline=columnshard.cpp:116;event=initialize_shard;step=initialize_tiring_finished; 2025-07-08T11:59:08.004977Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-07-08T11:59:08.005532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-07-08T11:59:08.005604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-07-08T11:59:08.005637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 
2025-07-08T11:59:08.005649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-07-08T11:59:08.005660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-07-08T11:59:08.005673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-07-08T11:59:08.005685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-07-08T11:59:08.005695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-07-08T11:59:08.005705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-07-08T11:59:08.005715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-07-08T11:59:08.005726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-07-08T11:59:08.005737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:125:2157];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-07-08T11:59:08.010381Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-07-08T11:59:08.010571Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:132;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-07-08T11:59:08.010581Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-07-08T11:59:08.010611Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:08.010649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-07-08T11:59:08.010659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-07-08T11:59:08.010664Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-07-08T11:59:08.010673Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-07-08T11:59:08.010681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-07-08T11:59:08.010688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-07-08T11:59:08.010693Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-07-08T11:59:08.010708Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-07-08T11:59:08.010714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-07-08T11:59:08.010721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-07-08T11:59:08.010726Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-07-08T11:59:08.010734Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-07-08T11:59:08.010740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-07-08T11:59:08.010747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-07-08T11:59:08.010751Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-07-08T11:59:08.010759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-07-08T11:59:08.010766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-07-08T11:59:08.010769Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-07-08T11:59:08.010792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-07-08T11:59:08.010799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-07-08T11:59:08.010804Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-07-08T11:59:08.010823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-07-08T11:59:08.010830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-07-08T11:59:08.010834Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-07-08T11:59:08.010845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-07-08T11:59:08.010851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-07-08T11:59:08.010855Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-07-08T11:59:08.010862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_deprecated_snapshot.cpp:30;tasks_for_rewrite=0; 2025-07-08T11:59:08.010869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-07-08T11:59:08.010875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-07-08T11:59:08.010879Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-07-08T11:59:08.010917Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=16; 2025-07-08T11:59:08.010930Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=8; 2025-07-08T11:59:08.010938Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2025-07-08T11:59:08.010948Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=7; 2025-07-08T11:59:08.010958Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks_v0_meta.cpp:100;normalizer=TChunksV0MetaNormalizer;message=0 chunks found; 2025-07-08T11:59:08.010967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-07-08T11:59:08.010974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-07-08T11:59:08.010979Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-07-08T11:59:08.011001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-07-08T11:59:08.011007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;ev ... =70548;count=254;size_of_portion=184; 2025-07-08T12:01:00.613480Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:48;memory_size=110;data_size=85;sum=27986;count=509; 2025-07-08T12:01:00.613487Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_meta.cpp:65;memory_size=206;data_size=197;sum=52466;count=510;size_of_meta=112; 2025-07-08T12:01:00.613492Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;load_stage_name=EXECUTE:granule/portions;fline=constructor_portion.cpp:40;memory_size=278;data_size=269;sum=70826;count=255;size_of_portion=184; 2025-07-08T12:01:00.613515Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=1430; 2025-07-08T12:01:00.613523Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=1; 2025-07-08T12:01:00.613617Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=88; 2025-07-08T12:01:00.613623Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=1559; 2025-07-08T12:01:00.613627Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=1571; 2025-07-08T12:01:00.613634Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=1; 2025-07-08T12:01:00.613658Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=19; 2025-07-08T12:01:00.613662Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=1666; 2025-07-08T12:01:00.613685Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=16; 2025-07-08T12:01:00.613701Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=12; 2025-07-08T12:01:00.613724Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=18; 2025-07-08T12:01:00.613740Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=12; 2025-07-08T12:01:00.614103Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=354; 2025-07-08T12:01:00.614504Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=388; 2025-07-08T12:01:00.614515Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=1; 2025-07-08T12:01:00.614521Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2025-07-08T12:01:00.614527Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-07-08T12:01:00.614541Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=10; 2025-07-08T12:01:00.614546Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-07-08T12:01:00.614562Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=10; 2025-07-08T12:01:00.614568Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=0; 2025-07-08T12:01:00.614578Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=6; 2025-07-08T12:01:00.614590Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=7; 2025-07-08T12:01:00.614603Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=9; 2025-07-08T12:01:00.614607Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=3738; 2025-07-08T12:01:00.614648Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=119665944;raw_bytes=192854450;count=5;records=1855000} inactive {blob_bytes=632703064;raw_bytes=989320282;count=54;records=9818750} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-07-08T12:01:00.614683Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:3121:5083];process=SwitchToWork;fline=columnshard.cpp:73;event=initialize_shard;step=SwitchToWork; 2025-07-08T12:01:00.614691Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:3121:5083];process=SwitchToWork;fline=columnshard.cpp:76;event=initialize_shard;step=SignalTabletActive; 2025-07-08T12:01:00.614708Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:3121:5083];process=SwitchToWork;fline=columnshard_impl.cpp:1327;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-07-08T12:01:00.614715Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:3121:5083];process=SwitchToWork;fline=column_engine_logs.cpp:471;event=OnTieringModified;new_count_tierings=0; 2025-07-08T12:01:00.614745Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T12:01:00.614766Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=23; 2025-07-08T12:01:00.614782Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975651108;tx_id=18446744073709551615;;current_snapshot_ts=1751975949029; 2025-07-08T12:01:00.614791Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=23;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T12:01:00.614803Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T12:01:00.614808Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T12:01:00.614830Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; 2025-07-08T12:01:00.621301Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:3121:5083];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:248;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-07-08T12:01:00.621351Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:3121:5083];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:237;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-07-08T12:01:00.621368Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-07-08T12:01:00.621372Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-07-08T12:01:00.621379Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:3121:5083];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:442;event=EnqueueBackgroundActivities;periodic=0; 2025-07-08T12:01:00.621410Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:3121:5083];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:224;event=StartCleanup;portions_count=23; 2025-07-08T12:01:00.621427Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:3121:5083];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:266;event=StartCleanupStop;snapshot=plan_step=1751975651108;tx_id=18446744073709551615;;current_snapshot_ts=1751975949029; 2025-07-08T12:01:00.621438Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:3121:5083];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:299;event=StartCleanup;portions_count=23;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-07-08T12:01:00.621464Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:3121:5083];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:790;background=cleanup;skip_reason=no_changes; 2025-07-08T12:01:00.621470Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:3121:5083];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:819;background=cleanup;skip_reason=no_changes; 2025-07-08T12:01:00.621492Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:3121:5083];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:168;event=skip_actualization;waiting=0.999000s; 2025-07-08T12:01:00.621503Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:3121:5083];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:748;background=ttl;skip_reason=no_changes; |68.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> DataShardSnapshots::PipelineAndMediatorRestoreRace [GOOD] >> DataShardSnapshots::ShardRestartLockBasic |68.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> test_generator.py::TestTpcdsGenerator::test_s1_state_and_parts [GOOD] |68.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest 
>> BasicUsage::PropagateSessionClosed [GOOD] >> BasicUsage::ReadMirrored >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink |68.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |68.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |68.7%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... results_accumulator.log} |68.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> DataShardSnapshots::ShardRestartLockBasic [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByConflict |68.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.8%| [TA] {RESULT} $(B)/ydb/services/metadata/initializer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |68.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |68.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/benchmarks_init/py3test >> test_generator.py::TestTpcdsGenerator::test_s1_state_and_parts [GOOD] |68.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... results_accumulator.log} |68.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |68.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |68.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> DataShardSnapshots::ShardRestartLockBrokenByConflict [GOOD] >> DataShardSnapshots::ShardRestartAfterDropTable |68.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> TRtmrTestReboots::CreateRtmrVolumeWithReboots |68.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> 
TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads |68.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |68.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClass >> YdbTableSplit::SplitByLoadWithDeletes >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError >> YdbTableSplit::MergeByNoLoadAfterSplit >> YdbTableSplit::RenameTablesAndSplit |68.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |68.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |68.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_3 >> DataShardSnapshots::DelayedWriteReadableAfterSplitAndReboot [GOOD] >> YdbTableSplit::SplitByLoadWithUpdates >> DataShardSnapshots::BrokenLockChangesDontLeak >> TOlapReboots::DropMultipleStandaloneTables [GOOD] |68.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |68.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |68.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |68.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |68.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup >> DataShardSnapshots::ShardRestartAfterDropTable [GOOD] >> ObjectStorageListingTest::ListingNoFilter ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] Test command err: === Server->StartServer(false); 2025-07-08T12:00:55.083827Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679860664515875:2170];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:55.083929Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:00:55.087313Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679863262251702:2146];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:55.087406Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0016fc/r3tmp/tmpH8I5Ui/pdisk_1.dat 2025-07-08T12:00:55.117366Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T12:00:55.121216Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T12:00:55.147443Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16228, node 1 2025-07-08T12:00:55.167405Z node 1 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/43nv/0016fc/r3tmp/yandex8JUBjE.tmp 2025-07-08T12:00:55.167415Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/43nv/0016fc/r3tmp/yandex8JUBjE.tmp 2025-07-08T12:00:55.171417Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/43nv/0016fc/r3tmp/yandex8JUBjE.tmp 2025-07-08T12:00:55.171509Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:00:55.172140Z INFO: TTestServer started on Port 13116 GrpcPort 16228 2025-07-08T12:00:55.185139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:55.185165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:55.193446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13116 PQClient connected to localhost:16228 === TenantModeEnabled() = 1 === Init PQ - start server on port 16228 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-07-08T12:00:55.221267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:55.221300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:55.228070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-07-08T12:00:55.228121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.228179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-07-08T12:00:55.228455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-07-08T12:00:55.228465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.228683Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-07-08T12:00:55.229259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:55.229373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-07-08T12:00:55.229404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-07-08T12:00:55.229446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.229455Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-07-08T12:00:55.229457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2025-07-08T12:00:55.229460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 2 -> 3 waiting... 
2025-07-08T12:00:55.231018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2025-07-08T12:00:55.231027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2025-07-08T12:00:55.231030Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2025-07-08T12:00:55.233490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.233502Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2025-07-08T12:00:55.233508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 3 -> 128 2025-07-08T12:00:55.237315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.237329Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.237334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet# 72057594046644480 2025-07-08T12:00:55.237343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2025-07-08T12:00:55.238132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:55.238807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2025-07-08T12:00:55.238853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2025-07-08T12:00:55.239735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1751976055284, transactions count in step: 1, at schemeshard: 72057594046644480 2025-07-08T12:00:55.239771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1751976055284 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-07-08T12:00:55.239777Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-07-08T12:00:55.239834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 128 -> 240 2025-07-08T12:00:55.239840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-07-08T12:00:55.239875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-07-08T12:00:55.239904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-07-08T12:00:55.240624Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-07-08T12:00:55.240631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976720657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-07-08T12:00:55.240677Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-07-08T12:00:55.240681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7524679860664516317:2378], at schemeshard: 72057594046644480, txId: 281474976720657, path id: 1 2025-07-08T12:00:55.240689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.240694Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976720657:0 ProgressState 2025-07-08T12:00:55.240707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2025-07-08T12:00:55.240709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-07-08T12:00:55.240713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2025-07-08T12:00:55.240714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-07-08T12:00:55.240722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 1/1, is published: false 2025-07-08T12:00:55.240726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-07-08T12:00:55.240728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720657:0 2025-07-08T12:00:55.240730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720657:0 2025-07-08T12:00:55.240740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 720575940 ... 4118Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-07-08T12:01:04.604124Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client message batch for topic 'PQ/account3/folder1/folder2/topic' partition 0 2025-07-08T12:01:04.604151Z node 1 :PERSQUEUE INFO: new Cookie 1236|5a3b5d42-3fc03398-5f4f5e5-b31a8a49_0 generated for partition 0 topic 'PQ/account3/folder1/folder2/topic' owner 1236 2025-07-08T12:01:04.604179Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-07-08T12:01:04.604198Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-07-08T12:01:04.605062Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-07-08T12:01:04.605067Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client message batch for topic 'PQ/account3/folder1/folder2/topic' partition 0 2025-07-08T12:01:04.605086Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-07-08T12:01:04.605107Z node 1 :PQ_WRITE_PROXY INFO: session inited cookie: 22 partition: 0 MaxSeqNo: 0 sessionId: 1236|5a3b5d42-3fc03398-5f4f5e5-b31a8a49_0 2025-07-08T12:01:04.609025Z :INFO: [] MessageGroupId [1236] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1751976064609 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-07-08T12:01:04.609066Z :INFO: [] MessageGroupId [1236] SessionId [] Write session established. Init response: session_id: "1236|5a3b5d42-3fc03398-5f4f5e5-b31a8a49_0" topic: "PQ/account3/folder1/folder2/topic" 2025-07-08T12:01:04.609530Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|5a3b5d42-3fc03398-5f4f5e5-b31a8a49_0] Write 1 messages with Id from 1 to 1 2025-07-08T12:01:04.609870Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|5a3b5d42-3fc03398-5f4f5e5-b31a8a49_0] Write session: try to update token 2025-07-08T12:01:04.609880Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|5a3b5d42-3fc03398-5f4f5e5-b31a8a49_0] Send 1 message(s) (0 left), first sequence number is 1 2025-07-08T12:01:04.610537Z :INFO: [] MessageGroupId [1236] SessionId [1236|5a3b5d42-3fc03398-5f4f5e5-b31a8a49_0] Write session: close. 
Timeout = 10000 ms 2025-07-08T12:01:04.611169Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 22 sessionId: 1236|5a3b5d42-3fc03398-5f4f5e5-b31a8a49_0 grpc read done: success: 1 data: write_request[data omitted] 2025-07-08T12:01:04.611336Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-07-08T12:01:04.611439Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-07-08T12:01:04.611445Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client message batch for topic 'PQ/account3/folder1/folder2/topic' partition 0 2025-07-08T12:01:04.611482Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 1 2025-07-08T12:01:04.611498Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-07-08T12:01:04.611585Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-07-08T12:01:04.611588Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client message batch for topic 'PQ/account3/folder1/folder2/topic' partition 0 2025-07-08T12:01:04.611656Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client PART message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 0 messageNo: 1 size: 511961 2025-07-08T12:01:04.611715Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client PART message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 1 messageNo: 1 size: 511961 2025-07-08T12:01:04.611752Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client PART message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 2 messageNo: 1 size: 176151 2025-07-08T12:01:04.611756Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 2 messageNo: 1 size 176151 offset: -1 2025-07-08T12:01:04.611788Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Send write quota request. Topic: "PQ/account3/folder1/folder2/topic". Partition: 0. Amount: 1200088. Cookie: 7 2025-07-08T12:01:05.377346Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Got quota. Topic: "PQ/account3/folder1/folder2/topic". 
Partition: 0: Cookie: 7 2025-07-08T12:01:05.377420Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob processing sourceId '\0001236' seqNo 1 partNo 0 2025-07-08T12:01:05.377440Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob processing sourceId '\0001236' seqNo 1 partNo 1 2025-07-08T12:01:05.377444Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob processing sourceId '\0001236' seqNo 1 partNo 2 2025-07-08T12:01:05.377769Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob complete sourceId '\0001236' seqNo 1 partNo 2 FormedBlobsCount 0 NewHead: Offset 6 PartNo 0 PackedSize 1200285 count 1 nextOffset 7 batches 3 2025-07-08T12:01:05.377893Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Add new write blob: topic 'PQ/account3/folder1/folder2/topic' partition 0 compactOffset 6,1 HeadOffset 6 endOffset 6 curOffset 7 d0000000000_00000000000000000006_00000_0000000001_00002| size 1200275 WTime 1751976065376 2025-07-08T12:01:05.378080Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-07-08T12:01:05.378092Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 6 partNo 0 count 1 size 1200275 2025-07-08T12:01:05.386231Z node 1 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 6 count 1 size 1200275 actorID [1:7524679890729289475:2488] 2025-07-08T12:01:05.386282Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1200088 WriteNewSizeFromSupportivePartitions# 0 2025-07-08T12:01:05.386290Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-07-08T12:01:05.386303Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Answering for message sourceid: '\0001236', Topic: 'PQ/account3/folder1/folder2/topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 6 is stored on disk 2025-07-08T12:01:05.386317Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-07-08T12:01:05.386340Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Answering for message sourceid: '\0001236', Topic: 'PQ/account3/folder1/folder2/topic', Partition: 0, SeqNo: 1, partNo: 1, Offset: 6 is stored on disk 2025-07-08T12:01:05.386350Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-07-08T12:01:05.386354Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Answering for message sourceid: '\0001236', Topic: 'PQ/account3/folder1/folder2/topic', Partition: 0, SeqNo: 1, partNo: 2, Offset: 6 is stored on disk 2025-07-08T12:01:05.386638Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-07-08T12:01:05.386667Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-07-08T12:01:05.387034Z node 1 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. 
Tablet '72075186224037899' partition 0 offset 6 partno 0 count 1 parts 2 size 1200275 2025-07-08T12:01:05.389056Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|5a3b5d42-3fc03398-5f4f5e5-b31a8a49_0] Write session got write response: sequence_numbers: 1 offsets: 6 already_written: false write_statistics { persist_duration_ms: 8 queued_in_partition_duration_ms: 765 throttled_on_partition_duration_ms: 765 } 2025-07-08T12:01:05.389078Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|5a3b5d42-3fc03398-5f4f5e5-b31a8a49_0] Write session: acknoledged message 1 2025-07-08T12:01:05.423377Z :INFO: [] MessageGroupId [1236] SessionId [1236|5a3b5d42-3fc03398-5f4f5e5-b31a8a49_0] Write session will now close 2025-07-08T12:01:05.423399Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|5a3b5d42-3fc03398-5f4f5e5-b31a8a49_0] Write session: aborting 2025-07-08T12:01:05.423570Z :INFO: [] MessageGroupId [1236] SessionId [1236|5a3b5d42-3fc03398-5f4f5e5-b31a8a49_0] Write session: gracefully shut down, all writes complete 2025-07-08T12:01:05.423579Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|5a3b5d42-3fc03398-5f4f5e5-b31a8a49_0] Write session: destroy 2025-07-08T12:01:05.423819Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 22 sessionId: 1236|5a3b5d42-3fc03398-5f4f5e5-b31a8a49_0 grpc read done: success: 0 data: 2025-07-08T12:01:05.423833Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 22 sessionId: 1236|5a3b5d42-3fc03398-5f4f5e5-b31a8a49_0 grpc read failed 2025-07-08T12:01:05.423844Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 22 sessionId: 1236|5a3b5d42-3fc03398-5f4f5e5-b31a8a49_0 grpc closed 2025-07-08T12:01:05.423847Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 22 sessionId: 1236|5a3b5d42-3fc03398-5f4f5e5-b31a8a49_0 is DEAD 2025-07-08T12:01:05.424018Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-07-08T12:01:05.424271Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] server disconnected, pipe [1:7524679899319224523:2556] destroyed 2025-07-08T12:01:05.424285Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::DropOwner. DURATION 2.971875s 2025-07-08T12:01:05.549956Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7524679903614191851:2561], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T12:01:05.550481Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWM4ODFjNjAtMWVlOTcwYzYtMzRlNGQ0NjUtYWFkNmY4YmY=, ActorId: [1:7524679903614191849:2560], ActorState: ExecuteState, TraceId: 01jzmyjeg68rzn6bqm1bk6py1p, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T12:01:05.550620Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } |68.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest |68.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest |68.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest |68.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink [GOOD] |68.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest |68.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::ShardRestartAfterDropTable [GOOD] Test command err: 2025-07-08T12:00:37.435882Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001a3e/r3tmp/tmpcNvJpS/pdisk_1.dat 2025-07-08T12:00:37.590674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:00:37.606577Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:37.638163Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-07-08T12:00:37.638416Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:37.638449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:37.638480Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-07-08T12:00:37.648874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:37.723560Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Handle TEvProposeTransaction 2025-07-08T12:00:37.723583Z node 1 :TX_PROXY DEBUG: actor# 
[1:60:2107] TxId# 281474976715657 ProcessProposeTransaction 2025-07-08T12:00:37.723611Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:578:2498] 2025-07-08T12:00:37.739578Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-07-08T12:00:37.739614Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-07-08T12:00:37.739807Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-07-08T12:00:37.739822Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-07-08T12:00:37.739881Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-07-08T12:00:37.739920Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-07-08T12:00:37.739932Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-07-08T12:00:37.740330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:37.740447Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 HANDLE EvClientConnected 2025-07-08T12:00:37.740567Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-07-08T12:00:37.740576Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 SEND to# [1:545:2470] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-07-08T12:00:37.754305Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:594:2513], Recipient [1:603:2519]: NKikimr::TEvTablet::TEvBoot 2025-07-08T12:00:37.754535Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:594:2513], Recipient [1:603:2519]: NKikimr::TEvTablet::TEvRestored 2025-07-08T12:00:37.754613Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:603:2519] 2025-07-08T12:00:37.754676Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:37.762810Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:594:2513], Recipient [1:603:2519]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T12:00:37.762974Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:37.762996Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:37.763143Z node 1 
:TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T12:00:37.763152Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T12:00:37.763158Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T12:00:37.763206Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:37.763224Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:37.763234Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:619:2519] in generation 1 2025-07-08T12:00:37.763300Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:37.767075Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T12:00:37.767133Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:37.767150Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:621:2529] 2025-07-08T12:00:37.767155Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:37.767160Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T12:00:37.767164Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:37.767214Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:603:2519], Recipient [1:603:2519]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-07-08T12:00:37.767220Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-07-08T12:00:37.767297Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T12:00:37.767314Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T12:00:37.767326Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:37.767332Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:37.767338Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-07-08T12:00:37.767342Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-07-08T12:00:37.767346Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-07-08T12:00:37.767351Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:37.767355Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:37.767449Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:610:2523], Recipient [1:603:2519]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:37.767455Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:37.767462Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:599:2516], serverId# [1:610:2523], sessionId# [0:0:0] 2025-07-08T12:00:37.767472Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:361:2356], Recipient [1:610:2523] 2025-07-08T12:00:37.767475Z node 1 :TX_DATASHARD TRACE: StateWork, 
processing event TEvDataShard::TEvProposeTransaction 2025-07-08T12:00:37.767498Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:37.767547Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-07-08T12:00:37.767557Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T12:00:37.767572Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T12:00:37.767579Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-07-08T12:00:37.767584Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-07-08T12:00:37.767591Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-07-08T12:00:37.767595Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-07-08T12:00:37.767668Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-07-08T12:00:37.767673Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-07-08T12:00:37.767676Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-07-08T12:00:37.767680Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-07-08T12:00:37.767689Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-07-08T12:00:37.767692Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-07-08T12:00:37.767695Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-07-08T12:00:37.767699Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-07-08T12:00:37.767702Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-07-08T12:00:37.767838Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:37.767845Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-07-08T12:00:37.767849Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-07-08T12:00:37.767856Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-07-08T12:00:37.767864Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T12:00:37.768224Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:622:2530], Recipient [1:603:2519]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-07-08T12:00 ... 
06.391310Z node 14 :TX_DATASHARD TRACE: Complete execution for [500:281474976715662] at 72075186224037888 on unit DropTable 2025-07-08T12:01:06.391315Z node 14 :TX_DATASHARD TRACE: Complete execution for [500:281474976715662] at 72075186224037888 on unit CompleteOperation 2025-07-08T12:01:06.391330Z node 14 :TX_DATASHARD DEBUG: Complete [500 : 281474976715662] from 72075186224037888 at tablet 72075186224037888 send result to client [14:362:2357], exec latency: 0 ms, propose latency: 0 ms 2025-07-08T12:01:06.391341Z node 14 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715662 state PreOffline TxInFly 0 2025-07-08T12:01:06.391354Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:01:06.391386Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [14:613:2523], Recipient [14:618:2526]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-07-08T12:01:06.391545Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [14:827:2693], Recipient [14:618:2526]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046644480 Status: OK ServerId: [14:830:2696] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-07-08T12:01:06.391552Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-07-08T12:01:06.391918Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [14:362:2357], Recipient [14:618:2526]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715662 2025-07-08T12:01:06.391928Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-07-08T12:01:06.391934Z node 14 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037888 state PreOffline 2025-07-08T12:01:06.391940Z node 14 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-07-08T12:01:06.392313Z node 14 :TX_DATASHARD DEBUG: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-07-08T12:01:06.392405Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [14:639:2539], Recipient [14:748:2624]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-07-08T12:01:06.392414Z node 14 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-07-08T12:01:06.392443Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [14:827:2693], Recipient [14:618:2526]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046644480 ClientId: [14:827:2693] ServerId: [14:830:2696] } 2025-07-08T12:01:06.392447Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-07-08T12:01:06.392457Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [14:639:2539], Recipient [14:618:2526]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-07-08T12:01:06.392461Z node 14 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:01:06.392568Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 65543, Sender [14:546:2471], Recipient [14:618:2526]: NActors::TEvents::TEvPoison 
2025-07-08T12:01:06.392619Z node 14 :TX_DATASHARD INFO: OnDetach: 72075186224037888 2025-07-08T12:01:06.392648Z node 14 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-07-08T12:01:06.405911Z node 14 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [14:838:2704], Recipient [14:840:2705]: NKikimr::TEvTablet::TEvBoot 2025-07-08T12:01:06.406836Z node 14 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [14:838:2704], Recipient [14:840:2705]: NKikimr::TEvTablet::TEvRestored 2025-07-08T12:01:06.406865Z node 14 :TX_DATASHARD TRACE: StateInit, received event# 268828684, Sender [14:838:2704], Recipient [14:840:2705]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T12:01:06.407352Z node 14 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [14:840:2705] 2025-07-08T12:01:06.407401Z node 14 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:01:06.407744Z node 14 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:01:06.408166Z node 14 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:01:06.408329Z node 14 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T12:01:06.408338Z node 14 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T12:01:06.408345Z node 14 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T12:01:06.408407Z node 14 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:01:06.408488Z node 14 :TX_DATASHARD TRACE: StateInactive, received event# 275709965, Sender [14:62:2109], Recipient [14:840:2705]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715659 LockNode: 14 Status: STATUS_SUBSCRIBED 2025-07-08T12:01:06.408498Z node 14 :TX_DATASHARD WARN: TDataShard::StateInactive unhandled event type: 275709965 event: NKikimrLongTxService.TEvLockStatus LockId: 281474976715659 LockNode: 14 Status: STATUS_SUBSCRIBED 2025-07-08T12:01:06.408550Z node 14 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:01:06.408559Z node 14 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [14:855:2705] in generation 2 2025-07-08T12:01:06.408718Z node 14 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:01:06.408732Z node 14 :TX_DATASHARD INFO: Switched to work state PreOffline tabletId 72075186224037888 2025-07-08T12:01:06.408750Z node 14 :TX_DATASHARD DEBUG: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 1 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-07-08T12:01:06.408757Z node 14 :TX_DATASHARD INFO: Send registration request to time cast PreOffline tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-07-08T12:01:06.408770Z node 14 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [14:857:2713] 2025-07-08T12:01:06.408774Z node 14 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:01:06.408778Z node 14 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: PreOffline, queue size: 0 2025-07-08T12:01:06.408782Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:01:06.408823Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [14:840:2705], Recipient 
[14:840:2705]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-07-08T12:01:06.408829Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-07-08T12:01:06.408860Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 2146435075, Sender [14:840:2705], Recipient [14:840:2705]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressResendReadSet 2025-07-08T12:01:06.408864Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressResendReadSet 2025-07-08T12:01:06.408932Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [14:25:2072], Recipient [14:840:2705]: {TEvRegisterTabletResult TabletId# 72075186224037888 Entry# 500} 2025-07-08T12:01:06.408936Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-07-08T12:01:06.408940Z node 14 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 500 2025-07-08T12:01:06.408964Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:01:06.409104Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:01:06.409112Z node 14 :TX_DATASHARD INFO: Progress tx at non-ready tablet 72075186224037888 state 5 2025-07-08T12:01:06.409117Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:01:06.409155Z node 14 :TX_DATASHARD DEBUG: Start TTxProgressResendRS at tablet 72075186224037888 2025-07-08T12:01:06.409161Z node 14 :TX_DATASHARD INFO: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715661 2025-07-08T12:01:06.409168Z node 14 :TX_DATASHARD DEBUG: Send RS 1 at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715661 2025-07-08T12:01:06.409254Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [14:840:2705], Recipient [14:748:2624]: {TEvReadSet step# 400 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 144 Seqno# 1 Flags# 0} 2025-07-08T12:01:06.409260Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-07-08T12:01:06.409266Z node 14 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715661 2025-07-08T12:01:06.409274Z node 14 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 400 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 144 Seqno# 1 Flags# 0} 2025-07-08T12:01:06.409280Z node 14 :TX_DATASHARD NOTICE: Outdated readset for 400:281474976715661 at 72075186224037889 2025-07-08T12:01:06.409288Z node 14 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-07-08T12:01:06.409292Z node 14 :TX_DATASHARD DEBUG: Send RS Ack at 72075186224037889 {TEvReadSet step# 400 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 144 Seqno# 1 Flags# 0} 2025-07-08T12:01:06.409305Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [14:748:2624], Recipient [14:840:2705]: {TEvReadSet step# 400 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037889 
SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-07-08T12:01:06.409309Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-07-08T12:01:06.409313Z node 14 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715661 2025-07-08T12:01:06.409322Z node 14 :TX_DATASHARD DEBUG: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-07-08T12:01:06.409342Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [14:25:2072], Recipient [14:840:2705]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 300 NextReadStep# 500 ReadStep# 500 } 2025-07-08T12:01:06.409345Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-07-08T12:01:06.409350Z node 14 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 300 next step 500 2025-07-08T12:01:06.623136Z node 14 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |68.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest |68.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError [GOOD] >> DataShardSnapshots::BrokenLockChangesDontLeak [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError |68.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::FilterListing >> ObjectStorageListingTest::ListingNoFilter [GOOD] >> TBsVDiskGC::GCPutManyBarriersNoSync >> IncrementalBackup::SimpleRestoreBackupCollection+WithIncremental |68.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest |68.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::ListingNoFilter [GOOD] Test command err: 2025-07-08T12:01:07.810437Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b4a/r3tmp/tmpObFi6L/pdisk_1.dat 2025-07-08T12:01:07.966019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:01:07.983446Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:08.017020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-07-08T12:01:08.017070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:08.033733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:08.113281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:08.142180Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:603:2519] 2025-07-08T12:01:08.142287Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:01:08.150974Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:01:08.151015Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:01:08.151155Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T12:01:08.151164Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T12:01:08.151170Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T12:01:08.151225Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:01:08.151241Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:01:08.151253Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:619:2519] in generation 1 2025-07-08T12:01:08.161618Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:01:08.166108Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T12:01:08.166196Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:01:08.166219Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:621:2529] 2025-07-08T12:01:08.166223Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:01:08.166226Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T12:01:08.166231Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:01:08.166382Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T12:01:08.166401Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T12:01:08.166417Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:01:08.166422Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:01:08.166431Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:01:08.166435Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:01:08.166522Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:599:2516], serverId# [1:610:2523], sessionId# [0:0:0] 2025-07-08T12:01:08.166554Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:01:08.166600Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 
281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T12:01:08.166614Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T12:01:08.166839Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:01:08.177239Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:01:08.177289Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T12:01:08.334033Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:640:2542], serverId# [1:642:2544], sessionId# [0:0:0] 2025-07-08T12:01:08.334915Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-07-08T12:01:08.334939Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:01:08.335096Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:01:08.335109Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-07-08T12:01:08.335120Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-07-08T12:01:08.335202Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-07-08T12:01:08.335239Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-07-08T12:01:08.335347Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:01:08.335367Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-07-08T12:01:08.335823Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-07-08T12:01:08.335921Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:01:08.336196Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-07-08T12:01:08.336206Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:01:08.336394Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-07-08T12:01:08.336407Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:01:08.336608Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:01:08.336617Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:01:08.336623Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-07-08T12:01:08.336640Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:361:2356], exec latency: 0 ms, propose latency: 0 ms 
2025-07-08T12:01:08.336650Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-07-08T12:01:08.336660Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:01:08.337633Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:01:08.337860Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-07-08T12:01:08.337870Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-07-08T12:01:08.337995Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-07-08T12:01:08.447655Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715658. Ctx: { TraceId: 01jzmyjh7k3kbwfcfw7x99wcw9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTlkOTAzNTUtODY2MjJmZS1jMjk3YTRhLTkxNmIxODI0, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:08.448883Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:685:2577], serverId# [1:686:2578], sessionId# [0:0:0] 2025-07-08T12:01:08.448994Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:01:08.472354Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:01:08.472414Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:01:08.473625Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:693:2584], serverId# [1:694:2585], sessionId# [0:0:0] 2025-07-08T12:01:08.473677Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-07-08T12:01:08.473723Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 3 common prefixes: 2 2025-07-08T12:01:08.473752Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:693:2584], serverId# [1:694:2585], sessionId# [0:0:0] >> IncrementalBackup::BackupRestore >> IncrementalBackup::SimpleBackup ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::DropMultipleStandaloneTables [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:127:2058] recipient: [1:109:2141] Leader 
for TabletID 72057594046447617 is [1:129:2154] sender: [1:132:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:137:2058] recipient: [1:111:2143] 2025-07-08T11:59:54.445925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:59:54.445950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:54.445955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:59:54.445960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:59:54.445966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:59:54.445970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:59:54.445978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:54.445994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:59:54.446074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:54.459289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T11:59:54.459319Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:177:2058] recipient: [1:15:2062] 2025-07-08T11:59:54.462839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:54.462882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:59:54.462919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:59:54.464397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:59:54.464508Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:59:54.464628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:54.464698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:59:54.465145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:54.465200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:59:54.465426Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:54.465434Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:54.465453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:59:54.465462Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:54.465467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:59:54.465503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-07-08T11:59:54.466668Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T11:59:54.484558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:54.484630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:54.484689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:59:54.484727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:59:54.484738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:54.485528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:54.485554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:59:54.485595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:54.485603Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:59:54.485608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:59:54.485612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:59:54.485926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:54.485935Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:54.485939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:59:54.486183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:54.486189Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:54.486195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:54.486202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:59:54.486754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:59:54.487055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:59:54.487093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:59:54.487303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:54.487323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:54.487330Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:54.487397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:59:54.487403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:54.487429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:59:54.487440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:59:54.487914Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:54.487921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:54.487972Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:54.487977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:59:54.488045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:54.488051Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:59:54.488062Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:54.488066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:54.488071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:54.488074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:54.488078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:59:54.488083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TO ... 7594046678944, cookie: 1004 2025-07-08T12:01:06.845203Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T12:01:06.845207Z node 112 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-07-08T12:01:06.845212Z node 112 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-07-08T12:01:06.845217Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-07-08T12:01:06.845356Z node 112 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T12:01:06.845368Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T12:01:06.845372Z node 112 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-07-08T12:01:06.845376Z node 112 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-07-08T12:01:06.845380Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-07-08T12:01:06.845392Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 0/1, is published: true 2025-07-08T12:01:06.845526Z node 112 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-07-08T12:01:06.846314Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:06.846504Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-07-08T12:01:06.846662Z node 112 :TX_COLUMNSHARD WARN: tablet_id=72075186233409546;self_id=[112:331:2318];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:863;event=tablet_die; Forgetting tablet 72075186233409546 2025-07-08T12:01:06.848042Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 
2025-07-08T12:01:06.848053Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-07-08T12:01:06.848069Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T12:01:06.848363Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1004:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 275382275 2025-07-08T12:01:06.848856Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-07-08T12:01:06.848883Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-07-08T12:01:06.850143Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-07-08T12:01:06.850159Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-07-08T12:01:06.850204Z node 112 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1005 2025-07-08T12:01:06.861186Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409547 TxId: 1004 2025-07-08T12:01:06.861210Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1004, tablet: 72075186233409547, partId: 0 2025-07-08T12:01:06.861235Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1004:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409547 TxId: 1004 2025-07-08T12:01:06.861248Z node 112 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1004:0 129 -> 130 FAKE_COORDINATOR: Erasing txId 1004 2025-07-08T12:01:06.861751Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1004:0, at schemeshard: 72057594046678944 2025-07-08T12:01:06.861793Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1004:0, at schemeshard: 72057594046678944 2025-07-08T12:01:06.861799Z node 112 :FLAT_TX_SCHEMESHARD INFO: TDropColumnTable TProposedDeleteParts operationId# 1004:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:01:06.861821Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-07-08T12:01:06.861846Z node 112 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2025-07-08T12:01:06.861851Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-07-08T12:01:06.861856Z node 112 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2025-07-08T12:01:06.861859Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-07-08T12:01:06.861864Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: true 2025-07-08T12:01:06.861876Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [112:361:2339] message: TxId: 1004 2025-07-08T12:01:06.861882Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 
2025-07-08T12:01:06.861886Z node 112 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2025-07-08T12:01:06.861890Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2025-07-08T12:01:06.861919Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-07-08T12:01:06.862317Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-07-08T12:01:06.862348Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-07-08T12:01:06.862354Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [112:527:2485] 2025-07-08T12:01:06.862444Z node 112 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-07-08T12:01:06.862637Z node 112 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[112:431:2399];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:863;event=tablet_die; 2025-07-08T12:01:06.863794Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-07-08T12:01:06.864002Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 Forgetting tablet 72075186233409547 2025-07-08T12:01:06.864191Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T12:01:06.864200Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-07-08T12:01:06.864215Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:01:06.864839Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-07-08T12:01:06.864853Z node 112 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-07-08T12:01:06.864929Z node 112 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1004 2025-07-08T12:01:06.865037Z node 112 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:01:06.865076Z node 112 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable1" took 48us result status StatusPathDoesNotExist 2025-07-08T12:01:06.865118Z node 112 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ColumnTable1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: 
"/MyRoot/ColumnTable1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-07-08T12:01:06.865185Z node 112 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:01:06.865199Z node 112 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable2" took 16us result status StatusPathDoesNotExist 2025-07-08T12:01:06.865216Z node 112 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ColumnTable2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/ColumnTable2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |69.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink [GOOD] Test command err: 2025-07-08T12:00:37.404746Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001a27/r3tmp/tmpCyp3N9/pdisk_1.dat 2025-07-08T12:00:37.539239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:00:37.556765Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:37.592616Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-07-08T12:00:37.592828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:37.592869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:37.592898Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-07-08T12:00:37.603392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:37.690220Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Handle TEvProposeTransaction 2025-07-08T12:00:37.690244Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] TxId# 281474976715657 ProcessProposeTransaction 2025-07-08T12:00:37.690273Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:578:2498] 2025-07-08T12:00:37.718128Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-07-08T12:00:37.718174Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-07-08T12:00:37.718386Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-07-08T12:00:37.718403Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-07-08T12:00:37.718460Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-07-08T12:00:37.718504Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-07-08T12:00:37.718519Z node 1 :TX_PROXY 
DEBUG: Actor# [1:578:2498] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-07-08T12:00:37.718913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:37.719042Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 HANDLE EvClientConnected 2025-07-08T12:00:37.719171Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-07-08T12:00:37.719181Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 SEND to# [1:545:2470] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-07-08T12:00:37.733369Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:594:2513], Recipient [1:603:2519]: NKikimr::TEvTablet::TEvBoot 2025-07-08T12:00:37.733679Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:594:2513], Recipient [1:603:2519]: NKikimr::TEvTablet::TEvRestored 2025-07-08T12:00:37.733784Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:603:2519] 2025-07-08T12:00:37.733856Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:37.742694Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:594:2513], Recipient [1:603:2519]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T12:00:37.742884Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:37.742909Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:37.743068Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T12:00:37.743079Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T12:00:37.743085Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T12:00:37.743138Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:37.743161Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:37.743173Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:619:2519] in generation 1 2025-07-08T12:00:37.743248Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:37.747685Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T12:00:37.747771Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:37.747795Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:621:2529] 2025-07-08T12:00:37.747801Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:37.747806Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T12:00:37.747811Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:37.747878Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:603:2519], Recipient [1:603:2519]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-07-08T12:00:37.747885Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-07-08T12:00:37.747973Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T12:00:37.747995Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T12:00:37.748009Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:37.748017Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:37.748024Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-07-08T12:00:37.748028Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-07-08T12:00:37.748032Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-07-08T12:00:37.748037Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:37.748043Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:37.748159Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:610:2523], Recipient [1:603:2519]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:37.748167Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:37.748175Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:599:2516], serverId# [1:610:2523], sessionId# [0:0:0] 2025-07-08T12:00:37.748186Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:361:2356], Recipient [1:610:2523] 2025-07-08T12:00:37.748190Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-07-08T12:00:37.748217Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:37.748272Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-07-08T12:00:37.748282Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T12:00:37.748296Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T12:00:37.748304Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-07-08T12:00:37.748308Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-07-08T12:00:37.748316Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-07-08T12:00:37.748320Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-07-08T12:00:37.748386Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-07-08T12:00:37.748391Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-07-08T12:00:37.748395Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-07-08T12:00:37.748399Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-07-08T12:00:37.748411Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-07-08T12:00:37.748415Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-07-08T12:00:37.748419Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-07-08T12:00:37.748422Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-07-08T12:00:37.748427Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-07-08T12:00:37.748605Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:37.748613Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-07-08T12:00:37.748617Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-07-08T12:00:37.748625Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-07-08T12:00:37.748635Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T12:00:37.749090Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:622:2530], Recipient [1:603:2519]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-07-08T12:00 ... order: 3, at tablet: 72075186224037888 2025-07-08T12:01:06.887134Z node 13 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 4, at tablet: 72075186224037888 2025-07-08T12:01:06.887255Z node 13 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 3, left# 0, at tablet# 72075186224037888 2025-07-08T12:01:06.887271Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 278593539, Sender [13:1042:2865], Recipient [13:908:2755]: NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2,3,4] } 2025-07-08T12:01:06.887280Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435091, Sender [13:908:2755], Recipient [13:908:2755]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRemoveChangeRecords 2025-07-08T12:01:06.887284Z node 13 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 3, at tablet# 72075186224037890 2025-07-08T12:01:06.887287Z node 13 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 2, at tablet: 72075186224037890 2025-07-08T12:01:06.887297Z node 13 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 3, at tablet: 72075186224037890 2025-07-08T12:01:06.887302Z node 13 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 4, at tablet: 72075186224037890 2025-07-08T12:01:06.887356Z node 13 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 3, left# 0, at tablet# 72075186224037890 2025-07-08T12:01:07.029116Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [13:908:2755]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-07-08T12:01:07.224182Z node 13 :TX_PROXY DEBUG: actor# [13:60:2107] Handle TEvExecuteKqpTransaction 2025-07-08T12:01:07.224209Z node 13 :TX_PROXY DEBUG: actor# [13:60:2107] TxId# 281474976715669 ProcessProposeKqpTransaction 2025-07-08T12:01:07.224411Z node 13 :KQP_EXECUTER ERROR: TxId: 
281474976715669. Ctx: { TraceId: 01jzmyjg266qpgyw1nngzx6pe9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MzgyYjQyOTYtZjcxZTQyZTItOTI5YzhjMC04NTY2ZDQ5NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 4 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false 2025-07-08T12:01:07.224854Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:1532:3290], Recipient [13:725:2610]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 4 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 3 2025-07-08T12:01:07.224884Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-07-08T12:01:07.224895Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037889 CompleteEdge# v7500/281474976715668 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-07-08T12:01:07.224902Z node 13 :TX_DATASHARD TRACE: 72075186224037889 changed HEAD read to non-repeatable v8000/18446744073709551615 2025-07-08T12:01:07.224912Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2025-07-08T12:01:07.224931Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-07-08T12:01:07.224937Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2025-07-08T12:01:07.224942Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-07-08T12:01:07.225000Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2025-07-08T12:01:07.225014Z node 13 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037889 2025-07-08T12:01:07.225024Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-07-08T12:01:07.225028Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-07-08T12:01:07.225033Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2025-07-08T12:01:07.225036Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2025-07-08T12:01:07.225051Z node 13 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 4 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-07-08T12:01:07.225104Z node 13 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[13:1532:3290], 0} after executionsCount# 1 2025-07-08T12:01:07.225112Z node 13 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[13:1532:3290], 0} sends rowCount# 2, bytes# 64, quota rows left# 32765, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 3, firstUnprocessed# 0 2025-07-08T12:01:07.225129Z node 13 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[13:1532:3290], 0} finished in read 2025-07-08T12:01:07.225138Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-07-08T12:01:07.225141Z node 13 :TX_DATASHARD TRACE: Advance 
execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2025-07-08T12:01:07.225145Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2025-07-08T12:01:07.225149Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2025-07-08T12:01:07.225162Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-07-08T12:01:07.225166Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2025-07-08T12:01:07.225170Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037889 has finished 2025-07-08T12:01:07.225175Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-07-08T12:01:07.225194Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-07-08T12:01:07.225423Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [13:1532:3290], Recipient [13:725:2610]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-07-08T12:01:07.225432Z node 13 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 21 } } 2025-07-08T12:01:07.268036Z node 13 :TX_PROXY DEBUG: actor# [13:60:2107] Handle TEvExecuteKqpTransaction 2025-07-08T12:01:07.268067Z node 13 :TX_PROXY DEBUG: actor# [13:60:2107] TxId# 281474976715670 ProcessProposeKqpTransaction 2025-07-08T12:01:07.268254Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jzmyjg4t8e863w9qphkc2eep, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MTQ4NGZlYWUtOGU0OWE2MjYtNTZiZWFiNTMtMTAzOGUyYjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 7 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false 2025-07-08T12:01:07.268665Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:1563:3315], Recipient [13:1003:2834]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 7 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 3 2025-07-08T12:01:07.268699Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2025-07-08T12:01:07.268711Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037891 CompleteEdge# v7500/281474976715668 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-07-08T12:01:07.268718Z node 13 :TX_DATASHARD TRACE: 72075186224037891 changed HEAD read to non-repeatable v8000/18446744073709551615 2025-07-08T12:01:07.268729Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037891 on unit CheckRead 2025-07-08T12:01:07.268750Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037891 is Executed 2025-07-08T12:01:07.268756Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037891 executing on unit CheckRead 2025-07-08T12:01:07.268762Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037891 to execution unit BuildAndWaitDependencies 2025-07-08T12:01:07.268767Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037891 on unit BuildAndWaitDependencies 2025-07-08T12:01:07.268779Z node 13 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037891 2025-07-08T12:01:07.268789Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037891 is Executed 2025-07-08T12:01:07.268793Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037891 executing on unit BuildAndWaitDependencies 2025-07-08T12:01:07.268797Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037891 to execution unit ExecuteRead 2025-07-08T12:01:07.268800Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037891 on unit ExecuteRead 2025-07-08T12:01:07.268814Z node 13 :TX_DATASHARD TRACE: 72075186224037891 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 7 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-07-08T12:01:07.268869Z node 13 :TX_DATASHARD TRACE: 72075186224037891 Complete read# {[13:1563:3315], 0} after executionsCount# 1 2025-07-08T12:01:07.268877Z node 13 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[13:1563:3315], 0} sends rowCount# 2, bytes# 64, quota rows left# 32765, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 3, firstUnprocessed# 0 2025-07-08T12:01:07.268893Z node 13 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[13:1563:3315], 0} finished in read 2025-07-08T12:01:07.268901Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037891 is Executed 2025-07-08T12:01:07.268905Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037891 executing on unit ExecuteRead 2025-07-08T12:01:07.268908Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037891 to execution unit CompletedOperations 2025-07-08T12:01:07.268912Z node 13 
:TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037891 on unit CompletedOperations 2025-07-08T12:01:07.268922Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037891 is Executed 2025-07-08T12:01:07.268925Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037891 executing on unit CompletedOperations 2025-07-08T12:01:07.268929Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037891 has finished 2025-07-08T12:01:07.268933Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2025-07-08T12:01:07.268972Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 2025-07-08T12:01:07.269173Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [13:1563:3315], Recipient [13:1003:2834]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-07-08T12:01:07.269183Z node 13 :TX_DATASHARD TRACE: 72075186224037891 ReadCancel: { ReadId: 0 } { items { uint32_value: 10 } items { uint32_value: 110 } }, { items { uint32_value: 20 } items { uint32_value: 210 } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::BrokenLockChangesDontLeak [GOOD] Test command err: 2025-07-08T12:00:37.446025Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001a4f/r3tmp/tmpMY3hyH/pdisk_1.dat 2025-07-08T12:00:37.591175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:00:37.606913Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:37.640517Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-07-08T12:00:37.640756Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:37.640793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:37.640828Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-07-08T12:00:37.651307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:37.734411Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Handle TEvProposeTransaction 2025-07-08T12:00:37.734438Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] TxId# 281474976715657 ProcessProposeTransaction 2025-07-08T12:00:37.734472Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:578:2498] 2025-07-08T12:00:37.750767Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-07-08T12:00:37.750812Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 
281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-07-08T12:00:37.751036Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-07-08T12:00:37.751053Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-07-08T12:00:37.751117Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-07-08T12:00:37.751160Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-07-08T12:00:37.751184Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-07-08T12:00:37.751582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:37.751707Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 HANDLE EvClientConnected 2025-07-08T12:00:37.751839Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-07-08T12:00:37.751850Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 SEND to# [1:545:2470] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-07-08T12:00:37.765621Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:594:2513], Recipient [1:603:2519]: NKikimr::TEvTablet::TEvBoot 2025-07-08T12:00:37.765894Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:594:2513], Recipient [1:603:2519]: NKikimr::TEvTablet::TEvRestored 2025-07-08T12:00:37.765981Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:603:2519] 2025-07-08T12:00:37.766056Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:37.774291Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:594:2513], Recipient [1:603:2519]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T12:00:37.774450Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:37.774469Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:37.774608Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T12:00:37.774615Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T12:00:37.774621Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T12:00:37.774669Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:37.774683Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:37.774694Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:619:2519] in generation 1 2025-07-08T12:00:37.774753Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:37.778504Z node 1 
:TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T12:00:37.778562Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:37.778580Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:621:2529] 2025-07-08T12:00:37.778584Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:37.778588Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T12:00:37.778593Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:37.778640Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:603:2519], Recipient [1:603:2519]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-07-08T12:00:37.778646Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-07-08T12:00:37.778727Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T12:00:37.778744Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T12:00:37.778754Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:37.778760Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:37.778767Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-07-08T12:00:37.778772Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-07-08T12:00:37.778775Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-07-08T12:00:37.778780Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:37.778784Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:37.778891Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:610:2523], Recipient [1:603:2519]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:37.778905Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:37.778911Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:599:2516], serverId# [1:610:2523], sessionId# [0:0:0] 2025-07-08T12:00:37.778922Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:361:2356], Recipient [1:610:2523] 2025-07-08T12:00:37.778926Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-07-08T12:00:37.778953Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:37.779011Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-07-08T12:00:37.779022Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T12:00:37.779038Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T12:00:37.779045Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 
2025-07-08T12:00:37.779049Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-07-08T12:00:37.779055Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-07-08T12:00:37.779059Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-07-08T12:00:37.779122Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-07-08T12:00:37.779126Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-07-08T12:00:37.779130Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-07-08T12:00:37.779133Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-07-08T12:00:37.779144Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-07-08T12:00:37.779147Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-07-08T12:00:37.779150Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-07-08T12:00:37.779153Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-07-08T12:00:37.779158Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-07-08T12:00:37.779308Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:37.779315Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-07-08T12:00:37.779319Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-07-08T12:00:37.779327Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-07-08T12:00:37.779336Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T12:00:37.779729Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:622:2530], Recipient [1:603:2519]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-07-08T12:00 ... 
ProposeTransaction 2025-07-08T12:01:07.774749Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [16:617:2525], Recipient [16:617:2525]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-07-08T12:01:07.774756Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-07-08T12:01:07.774775Z node 16 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:01:07.774862Z node 16 :TX_DATASHARD TRACE: TxId: 281474976715661, shard 72075186224037888, task: 1, meta: Table { TableId { OwnerId: 72057594046644480 TableId: 2 } TablePath: "/Root/table" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Writes { Range { Ranges { KeyPoints: "\001\000\004\000\000\000\003\000\000\000" } } Columns { Column { Id: 1 Name: "key" Type: 2 } MaxValueSizeBytes: 4 } Columns { Column { Id: 2 Name: "value" Type: 2 } MaxValueSizeBytes: 4 } } 2025-07-08T12:01:07.774874Z node 16 :TX_DATASHARD TRACE: Table /Root/table, shard: 72075186224037888, task: 1, write point (Uint32 : 3) 2025-07-08T12:01:07.774881Z node 16 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint32 : 3) table: [72057594046644480:2:1] 2025-07-08T12:01:07.774934Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit CheckDataTx 2025-07-08T12:01:07.774951Z node 16 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-07-08T12:01:07.774956Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit CheckDataTx 2025-07-08T12:01:07.774961Z node 16 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-07-08T12:01:07.774965Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit BuildAndWaitDependencies 2025-07-08T12:01:07.774985Z node 16 :TX_DATASHARD TRACE: Activated operation [0:281474976715661] at 72075186224037888 2025-07-08T12:01:07.774990Z node 16 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-07-08T12:01:07.774994Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-07-08T12:01:07.774997Z node 16 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-07-08T12:01:07.775001Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit ExecuteKqpDataTx 2025-07-08T12:01:07.775018Z node 16 :TX_DATASHARD TRACE: Operation [0:281474976715661] (execute_kqp_data_tx) at 72075186224037888 aborting because it cannot acquire locks 2025-07-08T12:01:07.775025Z node 16 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-07-08T12:01:07.775028Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-07-08T12:01:07.775031Z node 16 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit FinishPropose 2025-07-08T12:01:07.775035Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit FinishPropose 2025-07-08T12:01:07.775042Z node 16 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is DelayComplete 2025-07-08T12:01:07.775045Z node 16 :TX_DATASHARD TRACE: Advance 
execution plan for [0:281474976715661] at 72075186224037888 executing on unit FinishPropose 2025-07-08T12:01:07.775049Z node 16 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit CompletedOperations 2025-07-08T12:01:07.775052Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit CompletedOperations 2025-07-08T12:01:07.775063Z node 16 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-07-08T12:01:07.775066Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit CompletedOperations 2025-07-08T12:01:07.775070Z node 16 :TX_DATASHARD TRACE: Execution plan for [0:281474976715661] at 72075186224037888 has finished 2025-07-08T12:01:07.775087Z node 16 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:01:07.775091Z node 16 :TX_DATASHARD TRACE: Complete execution for [0:281474976715661] at 72075186224037888 on unit FinishPropose 2025-07-08T12:01:07.775097Z node 16 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715661 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: LOCKS_BROKEN 2025-07-08T12:01:07.775110Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:01:07.775204Z node 16 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=16&id=YmJjMjNjNzAtZmViYjAzYWItYTdlMThkMWMtZGNlMzdiZGU=, ActorId: [16:690:2579], ActorState: ExecuteState, TraceId: 01jzmyjgn66ems7xtxtegr309z, Create QueryResponse for error on request, msg: 2025-07-08T12:01:07.775349Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jzmyjgn66ems7xtxtegr309z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=YmJjMjNjNzAtZmViYjAzYWItYTdlMThkMWMtZGNlMzdiZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T12:01:07.775407Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [16:743:2579], Recipient [16:617:2525]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 743 RawX2: 68719479315 } TxBody: " \0018\001j3\010\001\032\'\n#\t\213\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\000 \003\"\006\020\0020\000@\n\220\001\000" TxId: 281474976715662 ExecLevel: 0 Flags: 8 2025-07-08T12:01:07.775412Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-07-08T12:01:07.775426Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [16:617:2525], Recipient [16:617:2525]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-07-08T12:01:07.775430Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-07-08T12:01:07.775436Z node 16 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:01:07.775455Z node 16 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715659, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-07-08T12:01:07.775463Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit CheckDataTx 2025-07-08T12:01:07.775468Z node 16 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2025-07-08T12:01:07.775471Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit CheckDataTx 2025-07-08T12:01:07.775475Z node 16 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-07-08T12:01:07.775478Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit BuildAndWaitDependencies 2025-07-08T12:01:07.775486Z node 16 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v300/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v300/18446744073709551615 ImmediateWriteEdge# v301/0 ImmediateWriteEdgeReplied# v301/0 2025-07-08T12:01:07.775493Z node 16 :TX_DATASHARD TRACE: Activated operation [0:281474976715662] at 72075186224037888 2025-07-08T12:01:07.775496Z node 16 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2025-07-08T12:01:07.775499Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-07-08T12:01:07.775503Z node 16 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-07-08T12:01:07.775506Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit ExecuteKqpDataTx 2025-07-08T12:01:07.775514Z node 16 :TX_DATASHARD TRACE: Operation [0:281474976715662] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193448 2025-07-08T12:01:07.775523Z node 16 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715659 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: false 2025-07-08T12:01:07.775542Z node 16 :TX_DATASHARD TRACE: add locks to result: 0 2025-07-08T12:01:07.775571Z node 16 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 
2025-07-08T12:01:07.775575Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-07-08T12:01:07.775578Z node 16 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit FinishPropose 2025-07-08T12:01:07.775581Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit FinishPropose 2025-07-08T12:01:07.775587Z node 16 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715662 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-07-08T12:01:07.775596Z node 16 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is DelayComplete 2025-07-08T12:01:07.775600Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit FinishPropose 2025-07-08T12:01:07.775603Z node 16 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit CompletedOperations 2025-07-08T12:01:07.775606Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit CompletedOperations 2025-07-08T12:01:07.775611Z node 16 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2025-07-08T12:01:07.775614Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit CompletedOperations 2025-07-08T12:01:07.775618Z node 16 :TX_DATASHARD TRACE: Execution plan for [0:281474976715662] at 72075186224037888 has finished 2025-07-08T12:01:07.775626Z node 16 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:01:07.775629Z node 16 :TX_DATASHARD TRACE: Complete execution for [0:281474976715662] at 72075186224037888 on unit FinishPropose 2025-07-08T12:01:07.775634Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:01:07.775797Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [16:62:2109], Recipient [16:617:2525]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715659 LockNode: 16 Status: STATUS_NOT_FOUND 2025-07-08T12:01:07.776290Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [16:752:2627], Recipient [16:617:2525]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:01:07.776297Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:01:07.776304Z node 16 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [16:751:2626], serverId# [16:752:2627], sessionId# [0:0:0] 2025-07-08T12:01:07.776320Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269553224, Sender [16:545:2470], Recipient [16:617:2525]: NKikimr::TEvDataShard::TEvGetOpenTxs >> IncrementalBackup::SimpleRestore >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 |69.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> TBsVDiskGC::GCPutManyBarriersNoSync [GOOD] >> TBsVDiskGC::TGCManyVPutsCompactGCAllTest >> DataShardSnapshots::VolatileSnapshotTimeoutRefresh [GOOD] >> DataShardSnapshots::VolatileSnapshotCleanupOnReboot |69.0%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/tx/datashard/ut_object_storage_listing/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] Test command err: === Server->StartServer(false); 2025-07-08T12:01:06.651955Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679909654615928:2147];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:06.657080Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679909562757437:2246];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0016bf/r3tmp/tmpsZaur6/pdisk_1.dat 2025-07-08T12:01:06.682805Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:01:06.682834Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T12:01:06.685556Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T12:01:06.686700Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:01:06.736042Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27443, node 1 2025-07-08T12:01:06.751948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:06.751976Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:06.759026Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:06.781698Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:06.781725Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:06.789531Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-07-08T12:01:06.789943Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:06.793981Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/43nv/0016bf/r3tmp/yandexfhjGDA.tmp 2025-07-08T12:01:06.793995Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/43nv/0016bf/r3tmp/yandexfhjGDA.tmp 2025-07-08T12:01:06.794040Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/43nv/0016bf/r3tmp/yandexfhjGDA.tmp 2025-07-08T12:01:06.794080Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:01:06.806489Z INFO: TTestServer started on Port 30334 GrpcPort 27443 TClient is connected to server localhost:30334 PQClient connected to localhost:27443 === TenantModeEnabled() = 1 === Init PQ - start server on port 27443 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:06.853971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-07-08T12:01:06.854037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:06.854104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-07-08T12:01:06.854183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-07-08T12:01:06.854192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:06.855082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-07-08T12:01:06.855111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-07-08T12:01:06.855159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:06.855166Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-07-08T12:01:06.855168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2025-07-08T12:01:06.855172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 2 -> 3 2025-07-08T12:01:06.855771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:06.855777Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2025-07-08T12:01:06.855780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 3 -> 128 2025-07-08T12:01:06.856115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:06.856119Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:06.856123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet# 72057594046644480 2025-07-08T12:01:06.856128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2025-07-08T12:01:06.856861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 waiting... 2025-07-08T12:01:06.857638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2025-07-08T12:01:06.857690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2025-07-08T12:01:06.858666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1751976066904, transactions count in step: 1, at schemeshard: 72057594046644480 2025-07-08T12:01:06.858702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1751976066904 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-07-08T12:01:06.858709Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-07-08T12:01:06.858769Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 128 -> 240 2025-07-08T12:01:06.858775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-07-08T12:01:06.858810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-07-08T12:01:06.858822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-07-08T12:01:06.859220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2025-07-08T12:01:06.859228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: false 2025-07-08T12:01:06.859232Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2025-07-08T12:01:06.859335Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-07-08T12:01:06.859338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976720657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-07-08T12:01:06.859376Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046644480 2025-07-08T12:01:06.859379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7524679909654616403:2379], at schemeshard: 72057594046644480, txId: 281474976720657, path id: 1 2025-07-08T12:01:06.859386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:06.859390Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976720657:0 ProgressState 2025-07-08T12:01:06.859400Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2025-07-08T12:01:06.859403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-07-08T12:01:06.859406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2025-07-08T12:01:06.859415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-07-08T12:01:06.859418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 1/1, is published: false 2025-07-08T12:01:06.859421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-07-08T12:01:06.859425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720657:0 2025-07-08T12:01:06.859427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720657:0 2025-07-08T12:01:06.859437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594 ... ITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-07-08T12:01:08.934285Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524679918901986251:2326] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-07-08T12:01:08.934291Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-07-08T12:01:08.934437Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-07-08T12:01:08.934489Z node 3 :PERSQUEUE INFO: new Cookie 12345678|b8aee795-a3b755d6-bea2fb82-bd73aa73_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2025-07-08T12:01:08.934635Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: 12345678|b8aee795-a3b755d6-bea2fb82-bd73aa73_0 Finish: 0 2025-07-08T12:01:08.935278Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: 12345678|b8aee795-a3b755d6-bea2fb82-bd73aa73_0 grpc read done: success: 0 data: 2025-07-08T12:01:08.935287Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|b8aee795-a3b755d6-bea2fb82-bd73aa73_0 grpc read failed 2025-07-08T12:01:08.935364Z node 3 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: 12345678|b8aee795-a3b755d6-bea2fb82-bd73aa73_0 2025-07-08T12:01:08.935368Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|b8aee795-a3b755d6-bea2fb82-bd73aa73_0 is DEAD 2025-07-08T12:01:08.935437Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison === InitializeWritePQService done === PersQueueClient === InitializePQ completed BEFORE MODIFY PERMISSIONS 2025-07-08T12:01:08.952318Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\027\010\001\022\023\032\021test_user@builtin\n\037\010\000\022\033\010\001\020\366\213\001\032\021test_user@builtin \003" } } TxId: 281474976715663 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:54256" , at schemeshard: 72057594046644480 2025-07-08T12:01:08.952379Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:08.952411Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 6] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-07-08T12:01:08.952413Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 6] 2025-07-08T12:01:08.952445Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715663:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-07-08T12:01:08.952455Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:08.952472Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715663:0 progress is 1/1 2025-07-08T12:01:08.952476Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715663 ready parts: 1/1 2025-07-08T12:01:08.952481Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715663:0 progress is 1/1 2025-07-08T12:01:08.952482Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715663 ready parts: 1/1 2025-07-08T12:01:08.952494Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046644480, LocalPathId: 6] was 3 2025-07-08T12:01:08.952511Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715663, ready parts: 1/1, is published: false 2025-07-08T12:01:08.952520Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 6], at schemeshard: 72057594046644480 2025-07-08T12:01:08.952523Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715663 ready parts: 1/1 2025-07-08T12:01:08.952527Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715663:0 2025-07-08T12:01:08.952532Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715663, publications: 1, subscribers: 0 2025-07-08T12:01:08.952539Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715663, [OwnerId: 72057594046644480, LocalPathId: 6], 3 2025-07-08T12:01:08.953077Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715663, response: Status: StatusSuccess TxId: 281474976715663 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-07-08T12:01:08.953128Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715663, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, add access: +W:test_user@builtin, remove access: -():test_user@builtin:- 2025-07-08T12:01:08.953169Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-07-08T12:01:08.953172Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715663, path id: [OwnerId: 72057594046644480, LocalPathId: 6] 2025-07-08T12:01:08.953216Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-07-08T12:01:08.953220Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7524679918901985387:2364], at schemeshard: 72057594046644480, txId: 281474976715663, path id: 6 2025-07-08T12:01:08.953468Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715663 2025-07-08T12:01:08.953480Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715663 2025-07-08T12:01:08.953483Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715663 2025-07-08T12:01:08.953486Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715663, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 3 2025-07-08T12:01:08.953489Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 4 2025-07-08T12:01:08.953507Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715663, subscribers: 0 2025-07-08T12:01:08.954464Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715663 2025-07-08T12:01:08.956066Z node 3 :PQ_WRITE_PROXY DEBUG: 
new grpc connection 2025-07-08T12:01:08.956084Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2025-07-08T12:01:08.956227Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-message-group" } 2025-07-08T12:01:08.956255Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-message-group" from ipv6:[::1]:54240 2025-07-08T12:01:08.956264Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:54240 proto=v1 topic=/Root/acc/topic1 durationSec=0 2025-07-08T12:01:08.956268Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-07-08T12:01:08.956530Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2025-07-08T12:01:08.956587Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-07-08T12:01:08.956594Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-07-08T12:01:08.956596Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-07-08T12:01:08.956608Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524679918901986284:2337] (SourceId=test-message-group, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-07-08T12:01:08.956614Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-07-08T12:01:08.956742Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-07-08T12:01:08.956801Z node 3 :PERSQUEUE INFO: new Cookie test-message-group|95996c1f-79bedefa-e816e21c-1ab1aa06_0 generated for partition 0 topic 'acc/topic1' owner test-message-group 2025-07-08T12:01:08.956915Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-message-group|95996c1f-79bedefa-e816e21c-1ab1aa06_0 2025-07-08T12:01:08.957375Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-message-group|95996c1f-79bedefa-e816e21c-1ab1aa06_0 grpc read done: success: 1 data: update_token_request [content omitted] 2025-07-08T12:01:08.957472Z node 3 :PQ_WRITE_PROXY INFO: updating token 2025-07-08T12:01:08.957487Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-07-08T12:01:08.957693Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-message-group|95996c1f-79bedefa-e816e21c-1ab1aa06_0 describe result for acl check 2025-07-08T12:01:08.957723Z node 3 :PQ_WRITE_PROXY INFO: session v1 error cookie: 2 reason: access to topic 'Topic /Root/acc/topic1 in database: /Root' denied for 'test_user_2@builtin' due to 'no WriteTopic rights', Marker# PQ1125 sessionId: test-message-group|95996c1f-79bedefa-e816e21c-1ab1aa06_0 2025-07-08T12:01:08.957836Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-message-group|95996c1f-79bedefa-e816e21c-1ab1aa06_0 is DEAD 2025-07-08T12:01:08.957935Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] Test command err: === Server->StartServer(false); 2025-07-08T12:01:06.641488Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679909243677866:2148];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:06.641589Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:01:06.644792Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679908510751776:2246];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:06.676731Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0016b9/r3tmp/tmpgv4cKu/pdisk_1.dat 2025-07-08T12:01:06.689653Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T12:01:06.689709Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:01:06.745922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:06.745955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:06.747289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected 2025-07-08T12:01:06.757082Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23542, node 1 2025-07-08T12:01:06.797011Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/43nv/0016b9/r3tmp/yandexUUCLCG.tmp 2025-07-08T12:01:06.797023Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/43nv/0016b9/r3tmp/yandexUUCLCG.tmp 2025-07-08T12:01:06.797086Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/43nv/0016b9/r3tmp/yandexUUCLCG.tmp 2025-07-08T12:01:06.797127Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:01:06.800661Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:06.800684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:06.802251Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-07-08T12:01:06.802758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:06.805933Z INFO: TTestServer started on Port 27087 GrpcPort 23542 TClient is connected to server localhost:27087 PQClient connected to localhost:23542 === TenantModeEnabled() = 1 === Init PQ - start server on port 23542 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-07-08T12:01:06.851726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-07-08T12:01:06.851778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:06.851830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-07-08T12:01:06.851882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-07-08T12:01:06.851891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:06.852684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-07-08T12:01:06.852744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-07-08T12:01:06.852804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:06.852812Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-07-08T12:01:06.852814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2025-07-08T12:01:06.852817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 2 -> 3 2025-07-08T12:01:06.853377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:06.853386Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2025-07-08T12:01:06.853389Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 3 -> 128 2025-07-08T12:01:06.853764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:06.853771Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:06.853775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet# 72057594046644480 2025-07-08T12:01:06.853779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2025-07-08T12:01:06.854491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 
2025-07-08T12:01:06.854917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2025-07-08T12:01:06.854965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2025-07-08T12:01:06.855535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1751976066904, transactions count in step: 1, at schemeshard: 72057594046644480 2025-07-08T12:01:06.855565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1751976066904 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-07-08T12:01:06.855571Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-07-08T12:01:06.855621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976720657:0 128 -> 240 2025-07-08T12:01:06.855625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-07-08T12:01:06.855653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-07-08T12:01:06.855662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-07-08T12:01:06.856249Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-07-08T12:01:06.856257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976720657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-07-08T12:01:06.856306Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-07-08T12:01:06.856309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7524679909243678353:2396], at schemeshard: 72057594046644480, txId: 281474976720657, path id: 1 2025-07-08T12:01:06.856316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:06.856320Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976720657:0 ProgressState 2025-07-08T12:01:06.856331Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2025-07-08T12:01:06.856333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-07-08T12:01:06.856336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720657:0 progress is 1/1 2025-07-08T12:01:06.856337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-07-08T12:01:06.856340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 1/1, is published: false 2025-07-08T12:01:06.856344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2025-07-08T12:01:06.856348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, 
operation id: 281474976720657:0 2025-07-08T12:01:06.856350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720657:0 2025-07-08T12:01:06.856363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-07-08T12:01:06.856367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976720657, publications: 1, subscribers: 0 2025-07-08T12:01:06.856369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720657, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-07-08T12:01:06.856959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 720575940466 ... nt64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-07-08T12:01:08.889476Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524679915516331419:2326] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-07-08T12:01:08.889483Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-07-08T12:01:08.890265Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-07-08T12:01:08.890300Z node 3 :PERSQUEUE INFO: new Cookie 12345678|c73ce553-eef9e85a-a29e30c7-94192fb8_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2025-07-08T12:01:08.890439Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: 12345678|c73ce553-eef9e85a-a29e30c7-94192fb8_0 2025-07-08T12:01:08.890909Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: 12345678|c73ce553-eef9e85a-a29e30c7-94192fb8_0 grpc read done: success: 0 data: 2025-07-08T12:01:08.890920Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|c73ce553-eef9e85a-a29e30c7-94192fb8_0 grpc read failed 2025-07-08T12:01:08.890995Z node 3 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: 12345678|c73ce553-eef9e85a-a29e30c7-94192fb8_0 2025-07-08T12:01:08.891004Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|c73ce553-eef9e85a-a29e30c7-94192fb8_0 is DEAD 2025-07-08T12:01:08.891072Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison Finish: 0 === InitializeWritePQService done === PersQueueClient === InitializePQ completed BEFORE MODIFY PERMISSIONS 2025-07-08T12:01:08.896760Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\031\010\001\022\025\032\023test_user_0@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_0@builtin \003\n\031\010\001\022\025\032\023test_user_1@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_1@builtin \003\n\031\010\001\022\025\032\023test_user_2@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_2@builtin \003" } } TxId: 281474976715663 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:40394" , at schemeshard: 72057594046644480 2025-07-08T12:01:08.896824Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976715663:0, at schemeshard: 72057594046644480 
2025-07-08T12:01:08.896859Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 6] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-07-08T12:01:08.896861Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 6] 2025-07-08T12:01:08.896904Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715663:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-07-08T12:01:08.896909Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:08.896930Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715663:0 progress is 1/1 2025-07-08T12:01:08.896933Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715663 ready parts: 1/1 2025-07-08T12:01:08.896937Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715663:0 progress is 1/1 2025-07-08T12:01:08.896939Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715663 ready parts: 1/1 2025-07-08T12:01:08.896967Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 3 2025-07-08T12:01:08.896981Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715663, ready parts: 1/1, is published: false 2025-07-08T12:01:08.896987Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 6], at schemeshard: 72057594046644480 2025-07-08T12:01:08.896989Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715663 ready parts: 1/1 2025-07-08T12:01:08.896993Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715663:0 2025-07-08T12:01:08.896997Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715663, publications: 1, subscribers: 0 2025-07-08T12:01:08.897000Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715663, [OwnerId: 72057594046644480, LocalPathId: 6], 3 2025-07-08T12:01:08.901481Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715663, response: Status: StatusSuccess TxId: 281474976715663 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-07-08T12:01:08.901563Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715663, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, add access: +W:test_user_0@builtin, add access: +W:test_user_1@builtin, add access: +W:test_user_2@builtin, remove access: -():test_user_0@builtin:-, remove access: -():test_user_1@builtin:-, remove access: -():test_user_2@builtin:- 2025-07-08T12:01:08.901625Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-07-08T12:01:08.901635Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715663, path id: [OwnerId: 72057594046644480, LocalPathId: 6] 2025-07-08T12:01:08.901697Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 
2025-07-08T12:01:08.901701Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7524679915516330556:2364], at schemeshard: 72057594046644480, txId: 281474976715663, path id: 6 2025-07-08T12:01:08.902113Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715663 2025-07-08T12:01:08.902127Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715663 2025-07-08T12:01:08.902129Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715663 2025-07-08T12:01:08.902133Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715663, pathId: [OwnerId: 72057594046644480, LocalPathId: 6], version: 3 2025-07-08T12:01:08.902138Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 4 2025-07-08T12:01:08.902168Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715663, subscribers: 0 2025-07-08T12:01:08.902755Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715663 2025-07-08T12:01:08.903046Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-07-08T12:01:08.903057Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2025-07-08T12:01:08.903208Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-group-id" } 2025-07-08T12:01:08.903233Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-group-id" from ipv6:[::1]:40388 2025-07-08T12:01:08.903239Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:40388 proto=v1 topic=/Root/acc/topic1 durationSec=0 2025-07-08T12:01:08.903242Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-07-08T12:01:08.903520Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2025-07-08T12:01:08.903572Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-07-08T12:01:08.903578Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-07-08T12:01:08.903579Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE 
$CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-07-08T12:01:08.903591Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524679915516331449:2337] (SourceId=test-group-id, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-07-08T12:01:08.903596Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-07-08T12:01:08.903770Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-07-08T12:01:08.903852Z node 3 :PERSQUEUE INFO: new Cookie test-group-id|dad37e5a-c95a2969-959690f5-303d8d51_0 generated for partition 0 topic 'acc/topic1' owner test-group-id 2025-07-08T12:01:08.904019Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-group-id|dad37e5a-c95a2969-959690f5-303d8d51_0 2025-07-08T12:01:08.904409Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-group-id|dad37e5a-c95a2969-959690f5-303d8d51_0 grpc read done: success: 1 data: update_token_request [content omitted] 2025-07-08T12:01:08.904465Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-group-id|dad37e5a-c95a2969-959690f5-303d8d51_0 grpc read done: success: 1 data: update_token_request [content omitted] 2025-07-08T12:01:08.904471Z node 3 :PQ_WRITE_PROXY INFO: session v1 error cookie: 2 reason: got another 'update_token_request' while previous still in progress, only single token update is allowed at a time sessionId: test-group-id|dad37e5a-c95a2969-959690f5-303d8d51_0 2025-07-08T12:01:08.904535Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-group-id|dad37e5a-c95a2969-959690f5-303d8d51_0 is DEAD 2025-07-08T12:01:08.904618Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison >> ObjectStorageListingTest::FilterListing [GOOD] >> BasicUsage::CloseWriteSessionImmediately [GOOD] >> BasicUsage::FallbackToSingleDbAfterBadRequest [GOOD] |69.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |69.0%| [LD] {RESULT} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |69.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut >> KqpSplit::AfterResolve+Unspecified >> KqpScan::SelfJoin3xSameLabels >> KqpSplit::IntersectionLosesRange+Ascending >> KqpScan::AggregateWithFunction >> KqpSplit::UndeliveryOnFinishedRead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::FilterListing [GOOD] Test command err: 2025-07-08T12:01:09.415904Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b03/r3tmp/tmpd672Re/pdisk_1.dat 2025-07-08T12:01:09.578500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:01:09.595573Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:09.627712Z node 
1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:09.627927Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:09.638502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:09.717248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:09.734962Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:603:2519] 2025-07-08T12:01:09.735063Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:01:09.743394Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:01:09.743438Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:01:09.743627Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T12:01:09.743639Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-07-08T12:01:09.743646Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T12:01:09.743707Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:01:09.743729Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:01:09.743744Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:619:2519] in generation 1 2025-07-08T12:01:09.754116Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:01:09.758016Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T12:01:09.758097Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:01:09.758120Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:621:2529] 2025-07-08T12:01:09.758124Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:01:09.758128Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T12:01:09.758132Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:01:09.758280Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T12:01:09.758304Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T12:01:09.758323Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:01:09.758329Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:01:09.758337Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:01:09.758342Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:01:09.758432Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:599:2516], serverId# [1:610:2523], sessionId# [0:0:0] 2025-07-08T12:01:09.758465Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:01:09.758516Z node 1 :TX_DATASHARD DEBUG: Propose scheme 
transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T12:01:09.758531Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T12:01:09.758792Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:01:09.769167Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:01:09.769217Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-07-08T12:01:09.917549Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:640:2542], serverId# [1:642:2544], sessionId# [0:0:0] 2025-07-08T12:01:09.918545Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-07-08T12:01:09.918576Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:01:09.918758Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:01:09.918771Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-07-08T12:01:09.918781Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-07-08T12:01:09.918871Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-07-08T12:01:09.918908Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-07-08T12:01:09.919038Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:01:09.919062Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-07-08T12:01:09.919502Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-07-08T12:01:09.919616Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:01:09.919891Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-07-08T12:01:09.919899Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:01:09.920032Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-07-08T12:01:09.920041Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:01:09.920205Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:01:09.920212Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:01:09.920216Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-07-08T12:01:09.920229Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:361:2356], 
exec latency: 0 ms, propose latency: 0 ms 2025-07-08T12:01:09.920236Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-07-08T12:01:09.920245Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:01:09.920845Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:01:09.923559Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-07-08T12:01:09.923581Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-07-08T12:01:09.923807Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-07-08T12:01:09.975860Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715658. Ctx: { TraceId: 01jzmyjjs5draamzh8y6ngsqx2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmZlZTI4ZTItYjZlODRjNzktM2I0OTlmODYtMzYwNGVkOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:09.977125Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:685:2577], serverId# [1:686:2578], sessionId# [0:0:0] 2025-07-08T12:01:09.977209Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:01:09.998224Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:01:09.998281Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:01:09.999445Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:693:2584], serverId# [1:694:2585], sessionId# [0:0:0] 2025-07-08T12:01:09.999496Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-07-08T12:01:09.999546Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 2 common prefixes: 1 2025-07-08T12:01:09.999576Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:693:2584], serverId# [1:694:2585], sessionId# [0:0:0] 2025-07-08T12:01:09.999808Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:699:2590], serverId# [1:700:2591], sessionId# [0:0:0] 2025-07-08T12:01:09.999828Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-07-08T12:01:09.999847Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 1 common prefixes: 1 2025-07-08T12:01:09.999868Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:699:2590], serverId# [1:700:2591], sessionId# [0:0:0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::CloseWriteSessionImmediately [GOOD] 
Test command err: 2025-07-08T12:00:54.332193Z :BasicWriteSession INFO: Random seed for debugging is 1751976054332188 2025-07-08T12:00:54.580124Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679859024682155:2148];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:54.580185Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:00:54.589056Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679859223130540:2246];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001ecb/r3tmp/tmp215Amk/pdisk_1.dat 2025-07-08T12:00:54.612097Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T12:00:54.613212Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T12:00:54.614441Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:00:54.642192Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:54.646196Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:54.646218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:54.647457Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-07-08T12:00:54.647724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16755, node 1 2025-07-08T12:00:54.661474Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/43nv/001ecb/r3tmp/yandexSOnSNM.tmp 2025-07-08T12:00:54.661487Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/43nv/001ecb/r3tmp/yandexSOnSNM.tmp 2025-07-08T12:00:54.665003Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/43nv/001ecb/r3tmp/yandexSOnSNM.tmp 2025-07-08T12:00:54.665062Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:00:54.666512Z INFO: TTestServer started on Port 16508 GrpcPort 16755 TClient is connected to server localhost:16508 PQClient connected to localhost:16755 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:54.715383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:54.715414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:54.716662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:54.716857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-07-08T12:00:54.768479Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976720659, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:54.976546Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7524679859024682931:2292], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T12:00:54.976936Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7524679859223130574:2263], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T12:00:54.977027Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmRiY2YzZTEtNjU2MDM0ZGItNzA5MjIwNTQtOWQ1N2IxY2Q=, ActorId: [1:7524679859024682928:2290], ActorState: ExecuteState, TraceId: 01jzmyj44gdxfqc59sm0p0c9tv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T12:00:54.977464Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T12:00:54.977287Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTVmMGJmMGItNzYxOTAwYTMtOWU4ODAxMjctYzljMDAwZjU=, ActorId: [2:7524679859223130572:2262], ActorState: ExecuteState, TraceId: 01jzmyj4433c38ekr1z5wzr7yw, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T12:00:54.977595Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T12:00:54.982015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.056980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-07-08T12:00:55.142160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:16755", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-07-08T12:00:55.183255Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720664. Ctx: { TraceId: 01jzmyj4btb7pxr2ef2tkr7dwm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjI3ZmExYTgtNjZhMTY1YWMtYjc5YzZkMzctNGY2YjZmNmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7524679863319650655:2911] 2025-07-08T12:00:55.584593Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:00:55.585496Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:00:59.578171Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7524679859024682155:2148];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:59.578203Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-07-08T12:00:59.589261Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7524679859223130540:2246];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:59.589299Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-07-08T12:01:00.328446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720676:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:16755 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-07-08T12:01:00.425856Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:16755 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 } } } CallPersQueueGRPC response: Status: 129 ProxyErrorCode: 53 SchemeStatus: 1 FlatTxId { TxId: 281474976720677 SchemeShardTabletId: 72057594046644480 PathId: 9 } ErrorCode: OK AddTopic: rt3.dc1--test-topic ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = test-topic, dc = dc1 2025-07-08T12:01:00.471737Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-07-08T12:01:00.472489Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-07-08T12:01:00.472547Z n ... 
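The "=== Init DC" and "===Run query" steps above seed the legacy PQ configuration tables that the cluster tracker polls; the "Cannot find table db.[/Root/PQ/Config/V2/Cluster]" compile errors earlier in this trace occur before these tables have been created. A rough YQL sketch of table shapes consistent with those UPSERTs follows; the column types for `Cluster` are inferred from the literal values and, like the primary keys, are assumptions:

CREATE TABLE `/Root/PQ/Config/V2/Cluster` (
    name Utf8,
    balancer Utf8,
    local Bool,
    enabled Bool,
    weight Int64,
    PRIMARY KEY (name)
);

CREATE TABLE `/Root/PQ/Config/V2/Topics` (
    path Utf8,
    dc Utf8,
    PRIMARY KEY (path)
);

CREATE TABLE `/Root/PQ/Config/V2/Versions` (
    name Utf8,
    version Int64,      -- matches DECLARE $version as Int64 in the seeding query
    PRIMARY KEY (name)
);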
--test-topic" PathId: 9 TotalGroupCount: 1 PartitionPerTablet: 5 PQTabletConfig { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 YdbDatabasePath: "/Root" } Partitions { PartitionId: 0 TabletId: 72075186224037892 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037893 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } Path: "/Root/PQ/rt3.dc1--test-topic" name rt3.dc1--test-topic version1 CallPersQueueGRPC request to localhost:28379 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-07-08T12:01:09.066524Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:28379 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-07-08T12:01:09.570224Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 1 ErrorCode: OK MetaResponse { CmdGetTopicMetadataResult { TopicInfo { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } Version: 1 LocalDC: true Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } TopicPath: "/Root/PQ/rt3.dc1--test-topic" YdbDatabasePath: "/Root" Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } Version: 0 Important: false } } ErrorCode: OK } } } === Topic created, have version: 1 2025-07-08T12:01:09.573632Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: try to update token 2025-07-08T12:01:09.573856Z :INFO: [] MessageGroupId [src] SessionId [] Write session: Do CDS request 2025-07-08T12:01:09.573863Z :INFO: [] MessageGroupId [src] SessionId [] Start write session. 
Will connect to endpoint: localhost:28379 2025-07-08T12:01:09.574261Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "test-topic" message_group_id: "src" } 2025-07-08T12:01:09.574792Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-07-08T12:01:09.574806Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2025-07-08T12:01:09.574957Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2025-07-08T12:01:09.574987Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:54374 2025-07-08T12:01:09.574992Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:54374 proto=v1 topic=test-topic durationSec=0 2025-07-08T12:01:09.574995Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-07-08T12:01:09.575476Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-07-08T12:01:09.575512Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-07-08T12:01:09.575514Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-07-08T12:01:09.575516Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-07-08T12:01:09.575523Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524679921789224958:2455] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-07-08T12:01:09.576011Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524679921789224958:2455] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-07-08T12:01:09.602340Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524679921789224958:2455] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-07-08T12:01:09.603691Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7524679921789225014:2455] connected; active server actors: 1 2025-07-08T12:01:09.603991Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7524679921789225014:2455] disconnected; active server actors: 1 2025-07-08T12:01:09.604002Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7524679921789225014:2455] disconnected no session 2025-07-08T12:01:09.603810Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524679921789224958:2455] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 
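Note that this federated-topic test keeps its source-id mapping in `/Root/PQ/SourceIdMeta2` rather than `//Root/.metadata/TopicPartitionsMapping`, and hashes the source id into a Uint32 instead of a Uint64. A YQL sketch of a table shape consistent with the TTableHelper queries above, with the same caveat that the DDL and primary key are assumptions not shown in the log:

CREATE TABLE `/Root/PQ/SourceIdMeta2` (
    Hash Uint32,        -- Uint32 here, unlike the Uint64 hash used for TopicPartitionsMapping
    Topic Utf8,
    SourceId Utf8,
    CreateTime Uint64,
    AccessTime Uint64,
    Partition Uint32,
    SeqNo Uint64,
    PRIMARY KEY (Hash, Topic, SourceId)
);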
2025-07-08T12:01:09.603824Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524679921789224958:2455] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-07-08T12:01:09.624320Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1751976069624 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-07-08T12:01:09.624359Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|697b17e8-d501af74-8f46003a-c6b95280_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-07-08T12:01:09.624459Z :INFO: [] MessageGroupId [src] SessionId [src|697b17e8-d501af74-8f46003a-c6b95280_0] Write session: close. Timeout = 0 ms 2025-07-08T12:01:09.624470Z :INFO: [] MessageGroupId [src] SessionId [src|697b17e8-d501af74-8f46003a-c6b95280_0] Write session will now close 2025-07-08T12:01:09.624477Z :DEBUG: [] MessageGroupId [src] SessionId [src|697b17e8-d501af74-8f46003a-c6b95280_0] Write session: aborting 2025-07-08T12:01:09.624588Z :INFO: [] MessageGroupId [src] SessionId [src|697b17e8-d501af74-8f46003a-c6b95280_0] Write session: gracefully shut down, all writes complete 2025-07-08T12:01:09.624594Z :DEBUG: [] MessageGroupId [src] SessionId [src|697b17e8-d501af74-8f46003a-c6b95280_0] Write session: destroy 2025-07-08T12:01:09.622977Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524679921789224958:2455] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-07-08T12:01:09.622998Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524679921789224958:2455] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-07-08T12:01:09.623001Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524679921789224958:2455] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-07-08T12:01:09.623010Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-07-08T12:01:09.623748Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 3, Generation: 1 2025-07-08T12:01:09.623761Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7524679921789225030:2455], now have 1 active actors on pipe 2025-07-08T12:01:09.623820Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-07-08T12:01:09.623829Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-07-08T12:01:09.623858Z node 3 :PERSQUEUE INFO: new Cookie src|697b17e8-d501af74-8f46003a-c6b95280_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-07-08T12:01:09.623889Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-07-08T12:01:09.623906Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-07-08T12:01:09.623986Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-07-08T12:01:09.623989Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-07-08T12:01:09.624001Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-07-08T12:01:09.624025Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|697b17e8-d501af74-8f46003a-c6b95280_0 2025-07-08T12:01:09.633007Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|697b17e8-d501af74-8f46003a-c6b95280_0 grpc read done: success: 0 data: 2025-07-08T12:01:09.633028Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|697b17e8-d501af74-8f46003a-c6b95280_0 grpc read failed 2025-07-08T12:01:09.633041Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|697b17e8-d501af74-8f46003a-c6b95280_0 grpc closed 2025-07-08T12:01:09.633050Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|697b17e8-d501af74-8f46003a-c6b95280_0 is DEAD 2025-07-08T12:01:09.633332Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-07-08T12:01:09.633660Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7524679921789225030:2455] destroyed 2025-07-08T12:01:09.633683Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. Session was created ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::FallbackToSingleDbAfterBadRequest [GOOD] Test command err: 2025-07-08T12:00:53.766150Z :FallbackToSingleDb INFO: Random seed for debugging is 1751976053766144 2025-07-08T12:00:53.914005Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679855166331988:2146];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:53.914088Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:00:53.919154Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679851804463960:2246];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:53.919205Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:00:53.957856Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001f02/r3tmp/tmplawcci/pdisk_1.dat 2025-07-08T12:00:53.961974Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T12:00:53.993410Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3504, node 1 2025-07-08T12:00:54.008731Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/43nv/001f02/r3tmp/yandexT0xjXs.tmp 2025-07-08T12:00:54.008748Z node 1 :NET_CLASSIFIER WARN: will try to 
initialize from file: /home/runner/.ya/build/build_root/43nv/001f02/r3tmp/yandexT0xjXs.tmp 2025-07-08T12:00:54.012417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:54.012443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:54.013528Z INFO: TTestServer started on Port 9779 GrpcPort 3504 2025-07-08T12:00:54.013928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:54.019863Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/43nv/001f02/r3tmp/yandexT0xjXs.tmp 2025-07-08T12:00:54.019926Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9779 PQClient connected to localhost:3504 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:54.059210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:54.059733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:54.059746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2025-07-08T12:00:54.061396Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-07-08T12:00:54.061636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:00:54.077090Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:54.117263Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:54.294941Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7524679856099431298:2263], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T12:00:54.295058Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODhhMjMzNWYtYTBiNjIzNWYtYmI2YTZhZDUtMjI3NjM5OWY=, ActorId: [2:7524679856099431296:2262], ActorState: ExecuteState, TraceId: 01jzmyj3fq9evjyqq33ed6dgpr, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T12:00:54.295271Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7524679859461300093:2292], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T12:00:54.295468Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T12:00:54.295558Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Yzg4NjdkYTYtOGY5NjI0Yy1iNzI5NDIxYy0yOWJhYTA4Ng==, ActorId: [1:7524679859461300090:2290], ActorState: ExecuteState, TraceId: 01jzmyj3g0ensrbgwghrpvxx48, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T12:00:54.295645Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T12:00:54.302655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:00:54.381003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:00:54.458091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:3504", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-07-08T12:00:54.567111Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jzmyj3rj1smaba2tztjjq7kg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTZmNDE4NS00YmZiYjZjLTIyZjg4NDhiLThmY2EzM2Y5, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7524679859461300476:2871] 2025-07-08T12:00:54.914463Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:00:54.921272Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:00:58.912733Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7524679855166331988:2146];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:58.912761Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-07-08T12:00:58.919344Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7524679851804463960:2246];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:58.919377Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-07-08T12:00:59.686598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:3504 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-07-08T12:00:59.725051Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:3504 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 } } } CallPersQueueGRPC response: Status: 129 ProxyErrorCode: 53 SchemeStatus: 1 FlatTxId { TxId: 281474976715677 SchemeShardTabletId: 72057594046644480 PathId: 9 } ErrorCode: OK AddTopic: rt3.dc1--test-topic ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = test-topic, dc = dc1 2025-07-08T12:00:59.745503Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7524679880936137516:3217] connected; active server ac ... 
syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-07-08T12:01:08.794929Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-07-08T12:01:08.794931Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-07-08T12:01:08.794936Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524679917130084131:2444] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-07-08T12:01:08.795430Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524679917130084131:2444] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-07-08T12:01:08.816549Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524679917130084131:2444] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-07-08T12:01:08.816681Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7524679917130084166:2444] connected; active server actors: 1 2025-07-08T12:01:08.816693Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524679917130084131:2444] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-07-08T12:01:08.816695Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524679917130084131:2444] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-07-08T12:01:08.816840Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7524679917130084166:2444] disconnected; active server actors: 1 2025-07-08T12:01:08.816854Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7524679917130084166:2444] disconnected no session 2025-07-08T12:01:08.835615Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524679917130084131:2444] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-07-08T12:01:08.835637Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524679917130084131:2444] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-07-08T12:01:08.835642Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524679917130084131:2444] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-07-08T12:01:08.835651Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-07-08T12:01:08.836054Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 4, Generation: 1 2025-07-08T12:01:08.836683Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|18dfadac-d9fec484-23ef5007-b071523f_0 2025-07-08T12:01:08.835993Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7524679917130084186:2444], now have 1 active actors on pipe 2025-07-08T12:01:08.836160Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-07-08T12:01:08.836169Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-07-08T12:01:08.836211Z node 4 :PERSQUEUE INFO: new Cookie src|18dfadac-d9fec484-23ef5007-b071523f_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-07-08T12:01:08.836250Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-07-08T12:01:08.836271Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-07-08T12:01:08.836567Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-07-08T12:01:08.836573Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-07-08T12:01:08.836596Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-07-08T12:01:08.837139Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1751976068837 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-07-08T12:01:08.837171Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|18dfadac-d9fec484-23ef5007-b071523f_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-07-08T12:01:08.837248Z :INFO: [] MessageGroupId [src] SessionId [src|18dfadac-d9fec484-23ef5007-b071523f_0] Write session: close. 
Timeout = 0 ms 2025-07-08T12:01:08.837253Z :INFO: [] MessageGroupId [src] SessionId [src|18dfadac-d9fec484-23ef5007-b071523f_0] Write session will now close 2025-07-08T12:01:08.837257Z :DEBUG: [] MessageGroupId [src] SessionId [src|18dfadac-d9fec484-23ef5007-b071523f_0] Write session: aborting 2025-07-08T12:01:08.837332Z :INFO: [] MessageGroupId [src] SessionId [src|18dfadac-d9fec484-23ef5007-b071523f_0] Write session: gracefully shut down, all writes complete 2025-07-08T12:01:08.837336Z :DEBUG: [] MessageGroupId [src] SessionId [src|18dfadac-d9fec484-23ef5007-b071523f_0] Write session: destroy 2025-07-08T12:01:08.837501Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|18dfadac-d9fec484-23ef5007-b071523f_0 grpc read done: success: 0 data: 2025-07-08T12:01:08.837511Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|18dfadac-d9fec484-23ef5007-b071523f_0 grpc read failed 2025-07-08T12:01:08.837518Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|18dfadac-d9fec484-23ef5007-b071523f_0 grpc closed 2025-07-08T12:01:08.837522Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|18dfadac-d9fec484-23ef5007-b071523f_0 is DEAD 2025-07-08T12:01:08.837806Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-07-08T12:01:08.838068Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7524679917130084186:2444] destroyed 2025-07-08T12:01:08.838085Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. PORTS 24807 15164 Session was created >>> Ready to answer: ok 2025-07-08T12:01:09.869460Z :INFO: [/Root] OnFederationDiscovery fall back to single mode, database=/Root 2025-07-08T12:01:09.869501Z :INFO: [/Root] [] [fdda164a-fb6dad37-34bec08b-c995d027] Open read subsessions to databases: { name: , endpoint: localhost:15164, path: /Root } 2025-07-08T12:01:09.869542Z :INFO: [/Root] [/Root] [805d4c76-77022bdf-fa9477e8-bc8e9f2e] Starting read session 2025-07-08T12:01:09.869547Z :DEBUG: [/Root] [/Root] [805d4c76-77022bdf-fa9477e8-bc8e9f2e] Starting single session 2025-07-08T12:01:09.869833Z :DEBUG: [/Root] [/Root] [805d4c76-77022bdf-fa9477e8-bc8e9f2e] [] In Reconnect, ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-07-08T12:01:09.869839Z :DEBUG: [/Root] [/Root] [805d4c76-77022bdf-fa9477e8-bc8e9f2e] [] New values: ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-07-08T12:01:09.869844Z :DEBUG: [/Root] [/Root] [805d4c76-77022bdf-fa9477e8-bc8e9f2e] [] Reconnecting session to cluster in 0.000000s 2025-07-08T12:01:09.869897Z :ERROR: [/Root] [/Root] [805d4c76-77022bdf-fa9477e8-bc8e9f2e] [] Got error. Status: CLIENT_CALL_UNIMPLEMENTED. Description:
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:15164
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:15164. 2025-07-08T12:01:09.869905Z :DEBUG: [/Root] [/Root] [805d4c76-77022bdf-fa9477e8-bc8e9f2e] [] In Reconnect, ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-07-08T12:01:09.869907Z :DEBUG: [/Root] [/Root] [805d4c76-77022bdf-fa9477e8-bc8e9f2e] [] New values: ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-07-08T12:01:09.869924Z :INFO: [/Root] [/Root] [805d4c76-77022bdf-fa9477e8-bc8e9f2e] [] Closing session to cluster: SessionClosed { Status: CLIENT_CALL_UNIMPLEMENTED Issues: "
: Error: Failed to establish connection to server "localhost:15164" ( cluster ). Attempts done: 1
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:15164
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:15164. " } 2025-07-08T12:01:09.870039Z :NOTICE: [/Root] [/Root] [805d4c76-77022bdf-fa9477e8-bc8e9f2e] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-07-08T12:01:09.870046Z :DEBUG: [/Root] [/Root] [805d4c76-77022bdf-fa9477e8-bc8e9f2e] [] Abort session to cluster Got new read session event: SessionClosed { Status: CLIENT_CALL_UNIMPLEMENTED Issues: "
: Error: Failed to establish connection to server "localhost:15164" ( cluster ). Attempts done: 1
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:15164
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:15164. " } 2025-07-08T12:01:09.870064Z :INFO: [/Root] [/Root] [805d4c76-77022bdf-fa9477e8-bc8e9f2e] Closing read session. Close timeout: 0.010000s 2025-07-08T12:01:09.870071Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-07-08T12:01:09.870077Z :INFO: [/Root] [/Root] [805d4c76-77022bdf-fa9477e8-bc8e9f2e] Counters: { Errors: 1 CurrentSessionLifetimeMs: 0 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-07-08T12:01:09.870082Z :INFO: [/Root] [/Root] [805d4c76-77022bdf-fa9477e8-bc8e9f2e] Closing read session. Close timeout: 0.000000s 2025-07-08T12:01:09.870084Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-07-08T12:01:09.870086Z :INFO: [/Root] [/Root] [805d4c76-77022bdf-fa9477e8-bc8e9f2e] Counters: { Errors: 1 CurrentSessionLifetimeMs: 0 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-07-08T12:01:09.870090Z :INFO: [/Root] [/Root] [805d4c76-77022bdf-fa9477e8-bc8e9f2e] Closing read session. Close timeout: 0.000000s 2025-07-08T12:01:09.870092Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-07-08T12:01:09.870096Z :INFO: [/Root] [/Root] [805d4c76-77022bdf-fa9477e8-bc8e9f2e] Counters: { Errors: 1 CurrentSessionLifetimeMs: 0 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-07-08T12:01:09.870100Z :NOTICE: [/Root] [/Root] [805d4c76-77022bdf-fa9477e8-bc8e9f2e] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } >> KqpScan::DecimalColumn >> KqpScan::RightSemiJoinSimple >> KqpScan::IsNull >> IncrementalBackup::SimpleRestore [GOOD] >> IncrementalBackup::SimpleBackupBackupCollection+WithIncremental >> KqpScan::DqSourceFullScan |69.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |69.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |69.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects >> KqpSplit::AfterResolve+Unspecified [GOOD] >> KqpSplit::AfterResult+Ascending >> KqpScan::UnionWithPureExpr >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx0 [GOOD] >> IncrementalBackup::SimpleRestoreBackupCollection+WithIncremental [GOOD] >> IncrementalBackup::SimpleRestoreBackupCollection-WithIncremental >> TPersQueueNewSchemeCacheTest::CheckGrpcWriteNoDC >> KqpScan::SelfJoin3xSameLabels [GOOD] >> KqpScan::SelfJoin3x >> KqpSplit::IntersectionLosesRange+Ascending [GOOD] >> KqpSplit::IntersectionLosesRange+Descending >> KqpScan::NullInKey >> DataShardSnapshots::VolatileSnapshotCleanupOnReboot [GOOD] >> DataShardSnapshots::VolatileSnapshotCleanupOnFinish >> KqpScan::AggregateWithFunction [GOOD] >> KqpScan::CountDistinct >> KqpScan::EarlyFinish >> IncrementalBackup::SimpleBackup [GOOD] >> IncrementalBackup::MultiRestore |69.1%| [TA] $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckGetHardenedErasureBlock42Count6Idx0 [GOOD] Test command err: iteration# 0 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 6 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 12 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 18 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 24 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 30 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 36 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 42 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 48 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 54 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 60 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 66 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 72 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 78 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 84 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 90 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 96 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 102 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 
iteration# 108 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 114 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 120 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 126 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 132 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 138 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 144 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 150 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 156 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 162 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 168 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 174 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 180 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 186 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 192 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 198 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 204 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 210 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 216 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 222 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 228 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 234 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 240 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 246 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 252 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 258 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 264 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 270 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 276 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 282 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 288 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 294 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 300 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 306 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 
blobsUnwritten# 1218 iteration# 312 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 318 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 324 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 330 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 336 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 342 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 348 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 354 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 360 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 366 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 372 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 378 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 384 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 390 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 396 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 402 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 408 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 414 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 420 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 426 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 432 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 438 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 444 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 450 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 456 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 462 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 468 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 474 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 480 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 486 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 492 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 498 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 504 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 510 BlobsWritten# 2041 blobsWrittenFull# 157 
blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 516 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 522 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 528 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 534 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 540 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 546 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 552 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 558 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 564 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 570 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 576 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 582 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 588 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 594 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 600 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 606 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 612 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 618 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 624 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 630 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 636 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 642 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 648 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 654 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 660 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 666 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 672 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 678 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 684 BlobsWritten# 2041 blobsWrittenFul ... 
blobsUnwritten# 1218 iteration# 1368 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1374 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1380 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1386 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1392 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1398 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1404 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1410 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1416 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1422 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1428 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1434 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1440 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1446 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1452 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1458 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1464 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1470 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1476 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1482 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1488 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1494 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1500 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1506 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1512 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1518 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1524 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1530 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1536 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1542 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1548 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1554 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1560 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1566 BlobsWritten# 2041 
blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1572 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1578 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1584 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1590 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1596 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1602 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1608 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1614 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1620 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1626 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1632 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1638 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1644 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1650 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1656 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1662 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1668 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1674 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1680 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1686 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1692 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1698 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1704 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1710 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1716 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1722 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1728 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1734 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1740 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1746 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1752 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1758 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1764 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 
blobsUnwritten# 1218 iteration# 1770 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1776 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1782 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1788 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1794 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1800 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1806 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1812 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1818 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1824 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1830 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1836 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1842 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1848 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1854 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1860 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1866 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1872 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1878 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1884 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1890 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1896 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1902 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1908 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1914 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1920 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1926 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1932 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1938 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1944 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1950 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1956 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1962 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1968 BlobsWritten# 2041 
blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1974 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1980 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1986 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1992 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 1998 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2004 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2010 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2016 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2022 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2028 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2034 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 iteration# 2040 BlobsWritten# 2041 blobsWrittenFull# 157 blobsWrittenAlmostFull# 666 blobsUnwritten# 1218 >> KqpSplit::UndeliveryOnFinishedRead [FAIL] >> KqpSplit::StreamLookupSplitBeforeReading >> KqpScan::RightSemiJoinSimple [GOOD] >> KqpScan::SecondaryIndex >> KqpScan::DecimalColumn [GOOD] >> KqpScan::CustomWindow >> KqpScan::StreamLookupByPkPrefix >> KqpScan::IsNull [GOOD] >> KqpScan::IsNullPartial >> KqpScan::TaggedScalar |69.1%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpScan::UnionWithPureExpr [GOOD] >> KqpScan::YqlTableSample >> KqpScan::DqSourceFullScan [GOOD] >> KqpScan::DqSource >> KqpSplit::IntersectionLosesRange+Descending [GOOD] >> KqpSplit::AfterResult+Ascending [GOOD] >> KqpScan::SelfJoin3x [GOOD] >> KqpScan::CountDistinct [GOOD] >> KqpScan::BoolFlag >> KqpScan::NullInKey [GOOD] >> KqpScan::NullInKeySuffix >> KqpScan::CustomWindow [GOOD] >> IncrementalBackup::SimpleRestoreBackupCollection-WithIncremental [GOOD] >> KqpScan::CrossJoinOneColumn >> KqpScan::TaggedScalar [GOOD] >> KqpScan::TooManyComputeActors >> KqpScan::YqlTableSample [GOOD] >> BasicUsage::ReadMirrored [GOOD] >> KqpSplit::StreamLookupSplitBeforeReading [GOOD] >> KqpScan::IsNullPartial [GOOD] >> KqpScan::GrepRange ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::AfterResult+Ascending [GOOD] Test command err: Trying to start YDB, gRPC: 1860, MsgBus: 10247 2025-07-08T12:01:10.538016Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679924349682268:2167];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:10.538092Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001170/r3tmp/tmpo5myLA/pdisk_1.dat 2025-07-08T12:01:10.606660Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1860, node 1 2025-07-08T12:01:10.621184Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:10.621201Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:10.621204Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:10.621253Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:01:10.634679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:10.634726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:10.635818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10247 TClient is connected to server localhost:10247 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
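[Editor's note] For readability, the YQL statements that appear inline in the PersQueue log lines above (the "===Run query" fragment and the PQ_PARTITION_CHOOSER TTableHelper queries) are repeated here with line breaks restored. The statement text, table paths and parameter names are exactly as captured in the log; only the formatting is added, and the leading "--!" of the first TTableHelper statement, which falls inside a truncated span of the log, is restored by analogy with the other two.

Topic registration ("===Run query", with topic = test-topic, dc = dc1):

    DECLARE $version as Int64;
    DECLARE $path AS Utf8;
    DECLARE $cluster as Utf8;
    UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster);
    UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);

TTableHelper select (partition lookup by source id):

    --!syntax_v1
    DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8;
    SELECT Partition, CreateTime, AccessTime, SeqNo
    FROM `/Root/PQ/SourceIdMeta2`
    WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId;

TTableHelper UpdateQuery (record the chosen partition for the source id):

    --!syntax_v1
    DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32;
    DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64;
    UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo)
    VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo);

TTableHelper UpdateAccessTimeQuery (touch the access time only):

    --!syntax_v1
    DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32;
    DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;
    UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime
    WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition;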
2025-07-08T12:01:10.690936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:10.737700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:10.774966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:10.796561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:10.816468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:10.931476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:10.941736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:10.952059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.008215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.020496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.081011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.108822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.295618Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7524679928644651922:2445] TxId: 281474976710671. Ctx: { TraceId: 01jzmyjm3abzckanqxpvj728w6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTM2NWZiYS05MDQ1MGM0ZC1mZjkzOWY4LTM5N2FiYmRh, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Can not find default state storage group for database 2025-07-08T12:01:11.295700Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01jzmyjm3abzckanqxpvj728w6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTM2NWZiYS05MDQ1MGM0ZC1mZjkzOWY4LTM5N2FiYmRh, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710672 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2025-07-08T12:01:11.315052Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976071342, txId: 281474976710670] shutting down Trying to start YDB, gRPC: 22015, MsgBus: 27480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001170/r3tmp/tmpKgNfLa/pdisk_1.dat 2025-07-08T12:01:11.704875Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:01:11.711529Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22015, node 2 2025-07-08T12:01:11.721773Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:11.721785Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:11.721786Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:11.721824Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27480 TClient is connected to server localhost:27480 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T12:01:11.791476Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:11.791509Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:11.791973Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.792405Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:11.798530Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-07-08T12:01:11.819040Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.882956Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:11.892044Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.014938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.027026Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.042239Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.050822Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.065796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.081591Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.096213Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- 2025-07-08T12:01:12.292406Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jzmyjn2kdnsvzjk0qk7g47qx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGYxMzY1Y2YtMTI1N2VhYWMtYzgwMGM5ZjEtMWRkZGIyMDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root scheme op Status: 53 TxId: 281474976715672 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2025-07-08T12:01:12.687640Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:01:12.696361Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976072336, txId: 281474976715670] shutting down >> IncrementalBackup::BackupRestore [GOOD] >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental >> KqpScan::StreamLookupByPkPrefix [GOOD] >> KqpScan::StreamLookupFailedRead >> KqpScan::SecondaryIndexCustomColumnOrder |69.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |69.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::IntersectionLosesRange+Descending [GOOD] Test command err: Trying to start YDB, gRPC: 26410, MsgBus: 25715 2025-07-08T12:01:10.752103Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679924117135265:2066];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:10.752121Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00117f/r3tmp/tmpzWlJC7/pdisk_1.dat 2025-07-08T12:01:10.806124Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26410, node 1 2025-07-08T12:01:10.837533Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:10.837546Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:10.837554Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:10.837615Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25715 2025-07-08T12:01:10.858300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:10.858340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:10.865466Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25715 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:10.949887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:11.001804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:11.066555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:11.096014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:11.160634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:11.249180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.261498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.274925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.332100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.344391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.358912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.417211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.624774Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7524679928412105054:2445] TxId: 281474976715671. Ctx: { TraceId: 01jzmyjmct2wmxn2w45bnb6rn0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWIwN2I0Y2ItMWJkOTFjZjgtNDIxNDYzMWEtNWE5YWFiMTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Can not find default state storage group for database 2025-07-08T12:01:11.624875Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jzmyjmct2wmxn2w45bnb6rn0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWIwN2I0Y2ItMWJkOTFjZjgtNDIxNDYzMWEtNWE5YWFiMTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715672 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2025-07-08T12:01:11.645800Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976071671, txId: 281474976715670] shutting down Trying to start YDB, gRPC: 21571, MsgBus: 64359 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00117f/r3tmp/tmpT6zGtI/pdisk_1.dat 2025-07-08T12:01:12.038212Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:01:12.052298Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21571, node 2 2025-07-08T12:01:12.066468Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:12.066483Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:12.066485Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:12.066524Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64359 TClient is connected to server localhost:64359 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T12:01:12.137153Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:12.137187Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:12.137519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.138056Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-07-08T12:01:12.145790Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.167415Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.201813Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.214343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.400898Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.408715Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.468214Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.480160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.493909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.507166Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.522167Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.761526Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jzmyjng394q31ddak1m1xnx9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWYyMzY3NWMtNWEyYmE4NWMtNTMwZmI4MmMtMTE5YWNiYzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715672 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2025-07-08T12:01:12.785062Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976072805, txId: 281474976715670] shutting down |69.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view >> KqpScan::SecondaryIndex [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::SelfJoin3x [GOOD] Test command err: Trying to start YDB, gRPC: 21878, MsgBus: 26570 2025-07-08T12:01:10.678239Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679927418936213:2071];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:10.679834Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00116c/r3tmp/tmp9O04PA/pdisk_1.dat 2025-07-08T12:01:10.755413Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21878, node 1 2025-07-08T12:01:10.786050Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:10.786062Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:10.786065Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:10.786118Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26570 2025-07-08T12:01:10.831968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:10.831995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:10.833309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26570 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-07-08T12:01:10.871473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:10.875131Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-07-08T12:01:10.881339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:10.913742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:10.940817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-07-08T12:01:10.963489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:11.077305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.092374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.115323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.127233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.141388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.157968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.190343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.406213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:11.583979Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7524679931713906225:2468] TxId: 281474976710673. Ctx: { TraceId: 01jzmyjm91a2pw08tbfn8h5q1q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTg1MWY3YjItZmViZThhYmMtNzgyNGM5MjItZjAwYmRjYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Can not find default state storage group for database /Root 2025-07-08T12:01:11.710319Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:01:11.747179Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976071629, txId: 281474976710672] shutting down Trying to start YDB, gRPC: 18180, MsgBus: 24857 2025-07-08T12:01:12.037036Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679935962110690:2063];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:12.037053Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00116c/r3tmp/tmp8Eh5iG/pdisk_1.dat 2025-07-08T12:01:12.058526Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18180, node 2 2025-07-08T12:01:12.073902Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:12.073917Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:12.073919Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:12.073965Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24857 TClient is connected to server localhost:24857 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:12.146302Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.146622Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:12.146645Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:12.148101Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-07-08T12:01:12.150784Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:12.280841Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.295719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.327174Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.343091Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.384558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.398813Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.408075Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.431523Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.450679Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.466671Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.523632Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.717552Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:13.008253Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976072910, txId: 281474976715672] shutting down 2025-07-08T12:01:13.037197Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::SimpleRestoreBackupCollection-WithIncremental [GOOD] Test command err: 2025-07-08T12:01:09.832233Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000bf5/r3tmp/tmp1SFL95/pdisk_1.dat 2025-07-08T12:01:10.003206Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:548:2472], Recipient [1:361:2356]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:01:10.003231Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:01:10.003237Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-07-08T12:01:10.003266Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:545:2470], Recipient [1:361:2356]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-07-08T12:01:10.003271Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-07-08T12:01:10.020482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-07-08T12:01:10.020557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:01:10.020612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-07-08T12:01:10.020651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-07-08T12:01:10.020666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:01:10.020679Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-07-08T12:01:10.020881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-07-08T12:01:10.020905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-07-08T12:01:10.020910Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-07-08T12:01:10.020914Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-07-08T12:01:10.020944Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:361:2356], Recipient [1:361:2356]: 
NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-07-08T12:01:10.021212Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-07-08T12:01:10.021250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:01:10.021263Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-07-08T12:01:10.021268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:01:10.021273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:01:10.021299Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-07-08T12:01:10.021440Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-07-08T12:01:10.021451Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-07-08T12:01:10.021479Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:361:2356], Recipient [1:361:2356]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-07-08T12:01:10.021484Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-07-08T12:01:10.021490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:01:10.021496Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-07-08T12:01:10.021500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:01:10.021511Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-07-08T12:01:10.021595Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-07-08T12:01:10.021602Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-07-08T12:01:10.021619Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:361:2356], Recipient [1:361:2356]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-07-08T12:01:10.021626Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-07-08T12:01:10.021631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:01:10.021635Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:01:10.021640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-07-08T12:01:10.021643Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-07-08T12:01:10.021657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:01:10.022329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:01:10.022446Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 
72057594046644480 2025-07-08T12:01:10.022458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:01:10.022504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-07-08T12:01:10.022763Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:553:2477], Recipient [1:361:2356]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:555:2478] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-07-08T12:01:10.022775Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-07-08T12:01:10.022780Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-07-08T12:01:10.022822Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:357:2352], Recipient [1:361:2356]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-07-08T12:01:10.022898Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:557:2480], Recipient [1:361:2356]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:01:10.022904Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:01:10.022908Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-07-08T12:01:10.022927Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:545:2470], Recipient [1:361:2356]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-07-08T12:01:10.022931Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-07-08T12:01:10.022943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-07-08T12:01:10.022948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-07-08T12:01:10.022952Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-07-08T12:01:10.047394Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 273285138, Sender [1:44:2091], Recipient [1:361:2356]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-07-08T12:01:10.047422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-07-08T12:01:10.047427Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:10.047480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:01:10.047487Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } 2025-07-08T12:01:10.079426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:10.079467Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:10.090122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected 2025-07-08T12:01:10.175397Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:357:2352], Recipient [1:361:2356]: NKikimrTx.TEvProposeTransactionStatus Status: 17 StepId: 500 TxId: 1 2025-07-08T12:01:10.175580Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:573:2493], Recipient [1:361:2356]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:01:10.175588Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:01:10.175592Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-07-08T12:01:10.175617Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [1:522:2449], Recipient [1:361:2356]: {TEvPlanStep step# 500 MediatorId# 72057594046382081 TabletID 72057594046644480} 2025-07-08T12:01:10.175622Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-07-08T12:01:10.175635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-07-08T12:01:10.175665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-07-08T12:01:10.175677Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-07-08T12:01:10.175755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:01:10.175764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-07-08T12:01:10.175785Z node 1 :FLAT_TX ... 
2:01:13.038707Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715660:0, shardIdx: 72057594046644480:2, shard: 72075186224037889, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-07-08T12:01:13.038711Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715660:0 HandleReply TEvDataShard::TEvSchemaChanged CollectSchemaChanged: false 2025-07-08T12:01:13.038715Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-07-08T12:01:13.038753Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.038757Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-07-08T12:01:13.038794Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.038798Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-07-08T12:01:13.060528Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:01:13.060559Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715660 state Ready TxInFly 0 2025-07-08T12:01:13.060570Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:01:13.060578Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:01:13.060635Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [2:840:2684], Recipient [2:362:2357]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:01:13.060641Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:01:13.060644Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-07-08T12:01:13.060669Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [2:667:2556], Recipient [2:362:2357]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 667 RawX2: 8589937148 } Origin: 72075186224037888 State: 2 TxId: 281474976715660 Step: 0 Generation: 1 2025-07-08T12:01:13.060676Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-07-08T12:01:13.060682Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 667 RawX2: 8589937148 } Origin: 72075186224037888 State: 2 TxId: 281474976715660 Step: 0 Generation: 1 2025-07-08T12:01:13.060688Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715660, tablet: 72075186224037888, partId: 0 2025-07-08T12:01:13.060706Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715660:0, at schemeshard: 72057594046644480, message: Source { RawX1: 667 RawX2: 8589937148 } Origin: 72075186224037888 State: 2 TxId: 281474976715660 Step: 0 Generation: 1 2025-07-08T12:01:13.060714Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715660:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046644480 2025-07-08T12:01:13.060721Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
NTableState::TProposedWaitParts operationId# 281474976715660:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 667 RawX2: 8589937148 } Origin: 72075186224037888 State: 2 TxId: 281474976715660 Step: 0 Generation: 1 2025-07-08T12:01:13.060738Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715660:0, shardIdx: 72057594046644480:1, shard: 72075186224037888, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-07-08T12:01:13.060740Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.060743Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715660:0, datashard: 72075186224037889, at schemeshard: 72057594046644480 2025-07-08T12:01:13.060746Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715660:0, datashard: 72075186224037888, at schemeshard: 72057594046644480 2025-07-08T12:01:13.060750Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 129 -> 240 2025-07-08T12:01:13.060769Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-07-08T12:01:13.060868Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.060873Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-07-08T12:01:13.060877Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715660:0 2025-07-08T12:01:13.060889Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [2:785:2638] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715660 at schemeshard: 72057594046644480 2025-07-08T12:01:13.060894Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [2:667:2556] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715660 at schemeshard: 72057594046644480 2025-07-08T12:01:13.060910Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715660 datashard 72075186224037888 state Ready 2025-07-08T12:01:13.060916Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-07-08T12:01:13.060938Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715660 datashard 72075186224037889 state Ready 2025-07-08T12:01:13.060942Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-07-08T12:01:13.060984Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [2:362:2357], Recipient [2:362:2357]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-07-08T12:01:13.060990Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-07-08T12:01:13.060996Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.061002Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 281474976715660:0ProgressState, operation type TxCopyTable 2025-07-08T12:01:13.061006Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-07-08T12:01:13.061011Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: Set barrier, OperationId: 281474976715660:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-07-08T12:01:13.061015Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715660, done: 0, blocked: 1 2025-07-08T12:01:13.061026Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 281474976715660:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 281474976715660 Name: CopyTableBarrier }, at tablet# 72057594046644480 2025-07-08T12:01:13.061031Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:0 240 -> 240 2025-07-08T12:01:13.061117Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-07-08T12:01:13.061123Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715660:0 2025-07-08T12:01:13.061140Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [2:362:2357], Recipient [2:362:2357]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-07-08T12:01:13.061144Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-07-08T12:01:13.061149Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.061154Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:0 ProgressState 2025-07-08T12:01:13.061165Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-07-08T12:01:13.061170Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:0 progress is 1/1 2025-07-08T12:01:13.061174Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715660 ready parts: 1/1 2025-07-08T12:01:13.061178Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:0 progress is 1/1 2025-07-08T12:01:13.061182Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715660 ready parts: 1/1 2025-07-08T12:01:13.061187Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715660, ready parts: 1/1, is published: true 2025-07-08T12:01:13.061199Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:765:2622] message: TxId: 281474976715660 2025-07-08T12:01:13.061205Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715660 ready parts: 1/1 2025-07-08T12:01:13.061210Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 2025-07-08T12:01:13.061214Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715660:0 2025-07-08T12:01:13.061241Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 7] was 3 2025-07-08T12:01:13.061245Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 3 2025-07-08T12:01:13.061324Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-07-08T12:01:13.061336Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [2:765:2622] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715660 at schemeshard: 72057594046644480 2025-07-08T12:01:13.061461Z node 2 :FLAT_TX_SCHEMESHARD 
TRACE: StateWork, received event# 269877764, Sender [2:773:2629], Recipient [2:362:2357]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-07-08T12:01:13.061467Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-07-08T12:01:13.061471Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-07-08T12:01:13.076407Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [2:867:2701], serverId# [2:868:2702], sessionId# [0:0:0] 2025-07-08T12:01:13.076450Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jzmyjnv6fbakxff9g65kptjg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTZkNWM3ZTctYTkwNzBlMDktYThmM2Y0MDAtMjU2YzJjZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 2 } items { uint32_value: 20 } }, { items { uint32_value: 3 } items { uint32_value: 30 } }, { items { uint32_value: 4 } items { uint32_value: 40 } }, { items { uint32_value: 5 } items { uint32_value: 50 } } >> BasicUsage::WriteSessionCloseIgnoresWrites [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::YqlTableSample [GOOD] Test command err: Trying to start YDB, gRPC: 12616, MsgBus: 61320 2025-07-08T12:01:11.710745Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679929438457964:2172];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:11.710835Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001149/r3tmp/tmprTG0Uq/pdisk_1.dat 2025-07-08T12:01:11.765106Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12616, node 1 2025-07-08T12:01:11.783669Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:11.783679Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:11.783681Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:11.783704Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61320 2025-07-08T12:01:11.810604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:11.810628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:11.811683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61320 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:11.838742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:11.842499Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:11.861982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:11.927232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:11.947864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-07-08T12:01:12.007590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.103399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.112158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.125673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.137213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.157889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.183224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.214261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.421381Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7524679933733427607:2446] TxId: 281474976715671. Ctx: { TraceId: 01jzmyjn6t84ykh98b16tcrebv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTIxMzc0NTAtYzk5OWMyMmYtY2I3MzQwYTItN2I2YjdhYzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Can not find default state storage group for database /Root 2025-07-08T12:01:12.422902Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976072421, txId: 281474976715670] shutting down Trying to start YDB, gRPC: 7017, MsgBus: 4669 2025-07-08T12:01:12.653850Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679934591945444:2060];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:12.654023Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001149/r3tmp/tmpBCIW5C/pdisk_1.dat 2025-07-08T12:01:12.667811Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7017, node 2 2025-07-08T12:01:12.676632Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:12.676644Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:12.676646Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:12.676690Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4669 TClient is connected to server localhost:4669 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:12.758604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.758748Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:12.758773Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2025-07-08T12:01:12.759932Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:12.761469Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.771738Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:12.787276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.803365Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.968118Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.979450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.990127Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.002294Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.016175Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.031666Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.044990Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.200426Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7524679938886915194:2447], status: UNSUPPORTED, issues:
: Error: Default error
:1:15: Error: ATOM evaluation is not supported in YDB queries., code: 2030 2025-07-08T12:01:13.200511Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGY0ZTMwMzgtZGU1OTEwZjYtYThkN2VjYmEtZDlmYjQzY2I=, ActorId: [2:7524679938886915192:2446], ActorState: ExecuteState, TraceId: 01jzmyjnzd8krsj2vx84s44gae, ReplyQueryCompileError, status UNSUPPORTED remove tx with tx_id: >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_3 [GOOD] >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 >> IncrementalBackup::SimpleBackupBackupCollection+WithIncremental [GOOD] >> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental >> KqpScan::NullInKeySuffix [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::ReadMirrored [GOOD] Test command err: 2025-07-08T12:00:53.791452Z :PropagateSessionClosed INFO: Random seed for debugging is 1751976053791448 2025-07-08T12:00:53.929859Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679854716407318:2170];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:53.937530Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:00:53.947018Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679852949409837:2074];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:53.947148Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001f01/r3tmp/tmpJcUBX3/pdisk_1.dat 2025-07-08T12:00:53.979380Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T12:00:53.982725Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T12:00:54.005871Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23064, node 1 2025-07-08T12:00:54.028245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:54.028271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:54.029983Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:54.033131Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/43nv/001f01/r3tmp/yandexyZ80su.tmp 2025-07-08T12:00:54.033143Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/43nv/001f01/r3tmp/yandexyZ80su.tmp 2025-07-08T12:00:54.038438Z INFO: TTestServer started on Port 5424 GrpcPort 23064 TClient is connected to server localhost:5424 PQClient connected to localhost:23064 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-07-08T12:00:54.054522Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/43nv/001f01/r3tmp/yandexyZ80su.tmp 2025-07-08T12:00:54.054611Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:54.076036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:54.076065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:54.082130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:54.082260Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-07-08T12:00:54.082580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 2025-07-08T12:00:54.093194Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:00:54.160489Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-07-08T12:00:54.334415Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7524679859011375367:2292], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T12:00:54.334524Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2E3NjBjN2ItY2MyZjM5NC1mOTY4NzVhZi1kOGZjNjIzMA==, ActorId: [1:7524679859011375364:2290], ActorState: ExecuteState, TraceId: 01jzmyj3gxd7ww4d6amatqfp49, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T12:00:54.334919Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T12:00:54.335065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:00:54.335284Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7524679857244377353:2263], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T12:00:54.335583Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDk5YTZiNGQtODliODQxY2EtYWMwODYxNGItNTU5MmYwNA==, ActorId: [2:7524679857244377351:2262], ActorState: ExecuteState, TraceId: 01jzmyj3h027x244hcq4pa73vt, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T12:00:54.335674Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T12:00:54.418538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:00:54.463961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:23064", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-07-08T12:00:54.528739Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jzmyj3q856re5a6r9x903vfn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTJjY2Y0ZS0xYWQzMGIzZi0xZDQ1MmE5MC1hZWQzOTgz, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7524679859011375743:2872] 2025-07-08T12:00:54.930783Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:00:54.945132Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:00:58.931759Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7524679854716407318:2170];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:58.931791Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-07-08T12:00:58.947572Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7524679852949409837:2074];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:58.947599Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-07-08T12:00:59.645874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:23064 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-07-08T12:00:59.721025Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:23064 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 } } } CallPersQueueGRPC response: Status: 129 ProxyErrorCode: 53 SchemeStatus: 1 FlatTxId { TxId: 281474976715677 SchemeShardTabletId: 72057594046644480 PathId: 9 } ErrorCode: OK AddTopic: rt3.dc1--test-topic ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = test-topic, dc = dc1 2025-07-08T12:00:59.759759Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7524679880486212780:3215] connected; active ... oot] Decompression task done. Partition/PartitionSessionId: 1 (0-0) 2025-07-08T12:01:12.721179Z :DEBUG: [/Root] [/Root] [9564e7af-78cad58e-a1607dd1-6ee43ecd] [] Returning serverBytesSize = 180 to budget 2025-07-08T12:01:12.721185Z :DEBUG: [/Root] [/Root] [9564e7af-78cad58e-a1607dd1-6ee43ecd] [] In ContinueReadingDataImpl, ReadSizeBudget = 180, ReadSizeServerDelta = 8386978 2025-07-08T12:01:12.721260Z :DEBUG: [/Root] [/Root] [9564e7af-78cad58e-a1607dd1-6ee43ecd] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 8387158 2025-07-08T12:01:12.721275Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-07-08T12:01:12.721296Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-07-08T12:01:12.721300Z :DEBUG: [/Root] [/Root] [9564e7af-78cad58e-a1607dd1-6ee43ecd] [] Returning serverBytesSize = 1450 to budget 2025-07-08T12:01:12.721305Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (1-1) 2025-07-08T12:01:12.721319Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 1} (2-2) 2025-07-08T12:01:12.721328Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 2} (3-3) 2025-07-08T12:01:12.721332Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 3} (4-4) >>> event from dataHandler: DataReceived { Partition session id: 3 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 Message { Data: ..20 bytes.. 
Information: { Offset: 0 ProducerId: "src_id" SeqNo: 1 CreateTime: 2025-07-08T12:01:12.714000Z WriteTime: 2025-07-08T12:01:12.715000Z MessageGroupId: "src_id" Meta: { "ident": "unknown", "logtype": "unknown", "_ip": "ipv6:[::1]:55340", "server": "ipv6:[::1]:55340" } MessageMeta: { } } Partition session id: 3 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } } >>> get 1 messages in this event 2025-07-08T12:01:12.721413Z :DEBUG: [/Root] [/Root] [9564e7af-78cad58e-a1607dd1-6ee43ecd] [] The application data is transferred to the client. Number of messages 1, size 20 bytes 2025-07-08T12:01:12.721422Z :DEBUG: [/Root] [/Root] [9564e7af-78cad58e-a1607dd1-6ee43ecd] [] Returning serverBytesSize = 0 to budget >>> event from dataHandler: DataReceived { Partition session id: 3 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 Message { Data: ..130 bytes.. Information: { Offset: 1 ProducerId: "src_id" SeqNo: 2 CreateTime: 2025-07-08T12:01:12.714000Z WriteTime: 2025-07-08T12:01:12.717000Z MessageGroupId: "src_id" Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:55340", "_ip": "ipv6:[::1]:55340" } MessageMeta: { } } Partition session id: 3 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } Message { Data: ..240 bytes.. Information: { Offset: 2 ProducerId: "src_id" SeqNo: 3 CreateTime: 2025-07-08T12:01:12.714000Z WriteTime: 2025-07-08T12:01:12.717000Z MessageGroupId: "src_id" Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:55340", "_ip": "ipv6:[::1]:55340" } MessageMeta: { } } Partition session id: 3 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } Message { Data: ..350 bytes.. Information: { Offset: 3 ProducerId: "src_id" SeqNo: 4 CreateTime: 2025-07-08T12:01:12.714000Z WriteTime: 2025-07-08T12:01:12.717000Z MessageGroupId: "src_id" Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:55340", "_ip": "ipv6:[::1]:55340" } MessageMeta: { } } Partition session id: 3 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } Message { Data: ..460 bytes.. Information: { Offset: 4 ProducerId: "src_id" SeqNo: 5 CreateTime: 2025-07-08T12:01:12.714000Z WriteTime: 2025-07-08T12:01:12.717000Z MessageGroupId: "src_id" Meta: { "ident": "unknown", "logtype": "unknown", "server": "ipv6:[::1]:55340", "_ip": "ipv6:[::1]:55340" } MessageMeta: { } } Partition session id: 3 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } } >>> get 4 messages in this event 2025-07-08T12:01:12.721520Z :DEBUG: [/Root] [/Root] [9564e7af-78cad58e-a1607dd1-6ee43ecd] [] The application data is transferred to the client. 
Number of messages 4, size 1180 bytes 2025-07-08T12:01:12.721527Z :DEBUG: [/Root] [/Root] [9564e7af-78cad58e-a1607dd1-6ee43ecd] [] Returning serverBytesSize = 0 to budget 2025-07-08T12:01:12.721768Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_1978223829665083172_v1 grpc read done: success# 1, data# { read_request { bytes_size: 180 } } 2025-07-08T12:01:12.721840Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_1978223829665083172_v1 got read request: guid# a913432a-302573f0-f12944b-5e00c76f 2025-07-08T12:01:12.815826Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|c70dcf34-9215dcf-259f071d-433de0c8_0] Write session will now close 2025-07-08T12:01:12.815848Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|c70dcf34-9215dcf-259f071d-433de0c8_0] Write session: aborting 2025-07-08T12:01:12.816031Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|c70dcf34-9215dcf-259f071d-433de0c8_0] Write session: gracefully shut down, all writes complete >>> Writes to test-topic-mirrored-from-dc3 successful 2025-07-08T12:01:12.816044Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|c70dcf34-9215dcf-259f071d-433de0c8_0] Write session: destroy 2025-07-08T12:01:12.816093Z :INFO: [/Root] [/Root] [9564e7af-78cad58e-a1607dd1-6ee43ecd] Closing read session. Close timeout: 18446744073709.551615s 2025-07-08T12:01:12.816111Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic-mirrored-from-dc3:0:3:4:0 -:test-topic-mirrored-from-dc2:0:2:4:0 -:test-topic:0:1:4:0 2025-07-08T12:01:12.816119Z :INFO: [/Root] [/Root] [9564e7af-78cad58e-a1607dd1-6ee43ecd] Counters: { Errors: 0 CurrentSessionLifetimeMs: 376 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-07-08T12:01:12.816266Z :INFO: [/Root] [/Root] [9564e7af-78cad58e-a1607dd1-6ee43ecd] Closing read session. Close timeout: 0.000000s 2025-07-08T12:01:12.816273Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic-mirrored-from-dc3:0:3:4:0 -:test-topic-mirrored-from-dc2:0:2:4:0 -:test-topic:0:1:4:0 2025-07-08T12:01:12.816277Z :INFO: [/Root] [/Root] [9564e7af-78cad58e-a1607dd1-6ee43ecd] Counters: { Errors: 0 CurrentSessionLifetimeMs: 376 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-07-08T12:01:12.816296Z :INFO: [/Root] [/Root] [9564e7af-78cad58e-a1607dd1-6ee43ecd] Closing read session. Close timeout: 0.000000s 2025-07-08T12:01:12.816300Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic-mirrored-from-dc3:0:3:4:0 -:test-topic-mirrored-from-dc2:0:2:4:0 -:test-topic:0:1:4:0 2025-07-08T12:01:12.816302Z :INFO: [/Root] [/Root] [9564e7af-78cad58e-a1607dd1-6ee43ecd] Counters: { Errors: 0 CurrentSessionLifetimeMs: 376 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-07-08T12:01:12.816315Z :NOTICE: [/Root] [/Root] [9564e7af-78cad58e-a1607dd1-6ee43ecd] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-07-08T12:01:12.816362Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: src_id|c70dcf34-9215dcf-259f071d-433de0c8_0 grpc read done: success: 0 data: 2025-07-08T12:01:12.816379Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|c70dcf34-9215dcf-259f071d-433de0c8_0 grpc read failed 2025-07-08T12:01:12.816392Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|c70dcf34-9215dcf-259f071d-433de0c8_0 grpc closed 2025-07-08T12:01:12.816399Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|c70dcf34-9215dcf-259f071d-433de0c8_0 is DEAD 2025-07-08T12:01:12.816440Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_1978223829665083172_v1 grpc read done: success# 0, data# { } 2025-07-08T12:01:12.816445Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_1978223829665083172_v1 grpc read failed 2025-07-08T12:01:12.816453Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_1978223829665083172_v1 grpc closed 2025-07-08T12:01:12.816475Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_1978223829665083172_v1 is DEAD 2025-07-08T12:01:12.816663Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-07-08T12:01:12.816704Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037897][rt3.dc1--test-topic-mirrored-from-dc3] pipe [3:7524679935421098691:2521] disconnected; active server actors: 1 2025-07-08T12:01:12.816708Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037897][rt3.dc1--test-topic-mirrored-from-dc3] pipe [3:7524679935421098691:2521] client user disconnected session shared/user_3_1_1978223829665083172_v1 2025-07-08T12:01:12.816712Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037895][rt3.dc1--test-topic-mirrored-from-dc2] pipe [3:7524679935421098693:2521] disconnected; active server actors: 1 2025-07-08T12:01:12.816715Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037895][rt3.dc1--test-topic-mirrored-from-dc2] pipe [3:7524679935421098693:2521] client user disconnected session shared/user_3_1_1978223829665083172_v1 2025-07-08T12:01:12.816729Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7524679935421098692:2521] disconnected; active server actors: 1 2025-07-08T12:01:12.816732Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7524679935421098692:2521] client user disconnected session shared/user_3_1_1978223829665083172_v1 2025-07-08T12:01:12.816821Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [3:7524679935421098829:2543] destroyed 2025-07-08T12:01:12.816837Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session shared/user_3_1_1978223829665083172_v1 2025-07-08T12:01:12.816843Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [3:7524679935421098700:2526] destroyed 2025-07-08T12:01:12.816851Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session shared/user_3_1_1978223829665083172_v1 2025-07-08T12:01:12.816854Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [3:7524679935421098702:2528] destroyed 2025-07-08T12:01:12.816860Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_1978223829665083172_v1 2025-07-08T12:01:12.816862Z 
node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7524679935421098701:2527] destroyed 2025-07-08T12:01:12.816877Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-07-08T12:01:12.816914Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_1978223829665083172_v1 2025-07-08T12:01:12.816924Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_1978223829665083172_v1 2025-07-08T12:01:12.816926Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_1978223829665083172_v1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::SecondaryIndex [GOOD] Test command err: Trying to start YDB, gRPC: 4028, MsgBus: 63547 2025-07-08T12:01:11.125003Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679930183493309:2061];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:11.125030Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001160/r3tmp/tmp6quDyD/pdisk_1.dat 2025-07-08T12:01:11.192425Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4028, node 1 2025-07-08T12:01:11.208581Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:11.208591Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:11.208594Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:11.208629Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:01:11.226410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:11.226443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:11.227539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63547 TClient is connected to server localhost:63547 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-07-08T12:01:11.330023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.333130Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:11.400121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:11.492802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:11.538987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:11.556510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:11.626492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.650972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.672819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.683574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.693788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.709786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.724013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.882682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.033379Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7524679934478463353:2468] TxId: 281474976715673. Ctx: { TraceId: 01jzmyjmrx8g936rrygna690sc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmI5NDk3ZWEtZGQ1ZWY0MmMtZjY0YzcyZjUtOTU2ZWIwYzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Can not find default state storage group for database /Root 2025-07-08T12:01:12.037580Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976072070, txId: 281474976715672] shutting down Trying to start YDB, gRPC: 28392, MsgBus: 8118 2025-07-08T12:01:12.344116Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679934021812534:2069];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:12.345833Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001160/r3tmp/tmpSVSpeV/pdisk_1.dat 2025-07-08T12:01:12.356749Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28392, node 2 2025-07-08T12:01:12.368000Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:12.368012Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:12.368015Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:12.368044Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8118 TClient is connected to server localhost:8118 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:12.443966Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:12.443994Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:12.444322Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.445192Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:12.448599Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:12.459829Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
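The runs of ESchemeOpCreateTable warnings above and below are the KqpScan::SecondaryIndex fixture being created before the scan queries execute; the actual DDL is not printed in this log. A minimal, purely illustrative YQL sketch of the kind of schema and index-backed read such a test exercises (the table name, columns and index name are assumptions, not the test's real schema):

    -- Hypothetical fixture: a table with a global secondary index.
    CREATE TABLE `/Root/SampleTable` (
        Key Uint64,
        Fk Uint64,
        Value Utf8,
        PRIMARY KEY (Key),
        INDEX ix_fk GLOBAL ON (Fk)
    );

    -- Index-backed read: VIEW routes the lookup through the secondary index
    -- instead of scanning the main table.
    SELECT Key, Value
    FROM `/Root/SampleTable` VIEW ix_fk
    WHERE Fk = 42;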
2025-07-08T12:01:12.476260Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.496454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.506333Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.704795Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.717489Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.733018Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.754120Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.776081Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.800858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.816134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.997544Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.006736Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.016586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.249057Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976073295, txId: 281474976715676] shutting down 2025-07-08T12:01:13.310036Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976073351, txId: 281474976715678] shutting down 2025-07-08T12:01:13.343908Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:01:13.345637Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: 
discarding snapshot; our snapshot: [step: 1751976073386, txId: 281474976715680] shutting down 2025-07-08T12:01:13.399642Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976073442, txId: 281474976715682] shutting down >> KqpScan::DqSource [GOOD] >> KqpScan::DqSourceLiteralRange >> DataShardSnapshots::VolatileSnapshotCleanupOnFinish [GOOD] >> DataShardSnapshots::VolatileSnapshotRenameTimeout >> KqpScan::BoolFlag [GOOD] >> KqpScan::TooManyComputeActors [GOOD] >> KqpScan::GrepRange [GOOD] >> TxUsage::TestRetentionOnLongTxAndBigMessages [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::WriteSessionCloseIgnoresWrites [GOOD] Test command err: 2025-07-08T12:00:54.266237Z :WriteSessionCloseWaitsForWrites INFO: Random seed for debugging is 1751976054266231 2025-07-08T12:00:54.446951Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679855916237701:2196];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:54.449335Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679856787690882:2180];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:54.450063Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:00:54.447479Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001ee5/r3tmp/tmpyTu9a2/pdisk_1.dat 2025-07-08T12:00:54.485334Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T12:00:54.501848Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T12:00:54.529361Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:54.547505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:54.547535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 12513, node 1 2025-07-08T12:00:54.553438Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:54.569170Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/43nv/001ee5/r3tmp/yandexs1c2eF.tmp 2025-07-08T12:00:54.569184Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/43nv/001ee5/r3tmp/yandexs1c2eF.tmp 2025-07-08T12:00:54.569624Z INFO: TTestServer started on Port 23230 GrpcPort 12513 2025-07-08T12:00:54.571810Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/43nv/001ee5/r3tmp/yandexs1c2eF.tmp 2025-07-08T12:00:54.571870Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23230 PQClient connected to localhost:12513 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-07-08T12:00:54.587962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:54.587988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:54.590824Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-07-08T12:00:54.591293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:54.594495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:00:54.620494Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976720658, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:54.630218Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976720659, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:00:54.842178Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7524679855916238412:2292], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T12:00:54.842642Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTBmMDIxMmUtZDYwZjA0YmItOTVhYmI0NzAtMjRlYzNlY2Y=, ActorId: [1:7524679855916238409:2290], ActorState: ExecuteState, TraceId: 01jzmyj40tby18nnq09hy85hga, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T12:00:54.843434Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7524679856787690984:2263], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T12:00:54.843503Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjkzYjFjOTktYjg1NmQ2ZWYtOTQ3MzFjMzAtMzY5YWMxNjc=, ActorId: [2:7524679856787690982:2262], ActorState: ExecuteState, TraceId: 01jzmyj417ad1d85fhqyx0h2zs, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T12:00:54.844344Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T12:00:54.844495Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T12:00:54.844799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 2025-07-08T12:00:54.911568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-07-08T12:00:54.980774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:12513", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-07-08T12:00:55.076513Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720664. Ctx: { TraceId: 01jzmyj48e3sn7qn1ymtqq28zn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGRmZTJlNjYtZThiZjU1NzktOTFkYjJhNDAtMzk4YTJlNWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7524679860211206089:2862] 2025-07-08T12:00:55.445513Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:00:55.449551Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:00:59.449016Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7524679855916237701:2196];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:59.449051Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-07-08T12:00:59.449752Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7524679856787690882:2180];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:59.449782Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-07-08T12:01:00.205186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720676:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:12513 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-07-08T12:01:00.273521Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:12513 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 } } } CallPersQueueGRPC response: Status: 129 ProxyErrorCode: 53 SchemeStatus: 1 FlatTxId { TxId: 281474976720677 SchemeShardTabletId: 72057594046644480 PathId: 9 } ErrorCode: OK AddTopic: rt3.dc1--test-topic ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = test-topic, dc = dc1 2025-07-08T12:01:00.314449Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720678. Ctx: { TraceId: 01jzmyj9c2d4e83tk1aa1tvq5d, D ... 
} Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 YdbDatabasePath: "/Root" } Partitions { PartitionId: 0 TabletId: 72075186224037892 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037893 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } Path: "/Root/PQ/rt3.dc1--test-topic" name rt3.dc1--test-topic version1 CallPersQueueGRPC request to localhost:14550 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-07-08T12:01:09.446199Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:14550 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC 2025-07-08T12:01:09.953129Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC request to localhost:14550 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } CallPersQueueGRPC response: Status: 1 ErrorCode: OK MetaResponse { CmdGetTopicMetadataResult { TopicInfo { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } Version: 1 LocalDC: true Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } TopicPath: "/Root/PQ/rt3.dc1--test-topic" YdbDatabasePath: "/Root" Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } Version: 0 Important: false } } ErrorCode: OK } } } === Topic created, have version: 1 2025-07-08T12:01:10.455513Z node 3 :PERSQUEUE INFO: proxy answer 2025-07-08T12:01:10.459232Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: try to update token 2025-07-08T12:01:10.459596Z :INFO: [] MessageGroupId [src] SessionId [] Write session: Do CDS request 2025-07-08T12:01:10.459603Z :INFO: [] MessageGroupId [src] SessionId [] Start write session. 
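The TTableHelper SelectQuery/UpdateQuery statements above are the partition chooser persisting source-id-to-partition assignments in `/Root/PQ/SourceIdMeta2` through parameterized YQL, and the earlier "Cannot find table '/Root/PQ/Config/V2/Cluster'" compile errors are followed in this log by ESchemeOpCreateTable operations and a successful "Init DC" upsert, i.e. the legacy PQ config tables are created on first use. Neither table's DDL appears in the output; the sketch below infers the columns from the DECLARE parameters and UPSERT column lists, while the column types for the config table and both primary keys are assumptions:

    -- Inferred from the DECLARE parameters in the TTableHelper queries above
    -- (the primary key is an assumption).
    CREATE TABLE `/Root/PQ/SourceIdMeta2` (
        Hash Uint32,
        Topic Utf8,
        SourceId Utf8,
        CreateTime Uint64,
        AccessTime Uint64,
        Partition Uint32,
        SeqNo Uint64,
        PRIMARY KEY (Hash, Topic, SourceId)
    );

    -- Inferred from the "Init DC" UPSERT column list (types and primary key are assumptions).
    CREATE TABLE `/Root/PQ/Config/V2/Cluster` (
        name Utf8,
        balancer Utf8,
        local Bool,
        enabled Bool,
        weight Uint64,
        PRIMARY KEY (name)
    );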
Will connect to endpoint: localhost:14550 2025-07-08T12:01:10.460114Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "test-topic" message_group_id: "src" } 2025-07-08T12:01:10.460291Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-07-08T12:01:10.460306Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2025-07-08T12:01:10.460495Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2025-07-08T12:01:10.460534Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:59256 2025-07-08T12:01:10.460540Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:59256 proto=v1 topic=test-topic durationSec=0 2025-07-08T12:01:10.460544Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-07-08T12:01:10.461005Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-07-08T12:01:10.461043Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-07-08T12:01:10.461045Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-07-08T12:01:10.461047Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-07-08T12:01:10.461054Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524679924674062776:2461] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-07-08T12:01:10.461564Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524679924674062776:2461] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-07-08T12:01:10.484065Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524679924674062776:2461] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-07-08T12:01:10.484177Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7524679924674062808:2461] connected; active server actors: 1 2025-07-08T12:01:10.484197Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524679924674062776:2461] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-07-08T12:01:10.484200Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524679924674062776:2461] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-07-08T12:01:10.484267Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7524679924674062808:2461] disconnected; active server actors: 1 
2025-07-08T12:01:10.484275Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7524679924674062808:2461] disconnected no session 2025-07-08T12:01:10.501355Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524679924674062776:2461] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-07-08T12:01:10.501376Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524679924674062776:2461] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-07-08T12:01:10.501381Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524679924674062776:2461] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-07-08T12:01:10.501394Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-07-08T12:01:10.501763Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 4, Generation: 1 2025-07-08T12:01:10.503739Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7524679924674062826:2461], now have 1 active actors on pipe 2025-07-08T12:01:10.503777Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-07-08T12:01:10.503785Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-07-08T12:01:10.503825Z node 4 :PERSQUEUE INFO: new Cookie src|55ebc02e-5b5ba41a-24f439fa-fadb2cf_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-07-08T12:01:10.503857Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-07-08T12:01:10.503876Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-07-08T12:01:10.504425Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-07-08T12:01:10.504434Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-07-08T12:01:10.504453Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-07-08T12:01:10.504637Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|55ebc02e-5b5ba41a-24f439fa-fadb2cf_0 2025-07-08T12:01:10.505139Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1751976070505 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-07-08T12:01:10.505173Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|55ebc02e-5b5ba41a-24f439fa-fadb2cf_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-07-08T12:01:10.505299Z :INFO: [] MessageGroupId [src] SessionId [src|55ebc02e-5b5ba41a-24f439fa-fadb2cf_0] Write session: close. 
Timeout = 0 ms 2025-07-08T12:01:10.505304Z :INFO: [] MessageGroupId [src] SessionId [src|55ebc02e-5b5ba41a-24f439fa-fadb2cf_0] Write session will now close 2025-07-08T12:01:10.505308Z :DEBUG: [] MessageGroupId [src] SessionId [src|55ebc02e-5b5ba41a-24f439fa-fadb2cf_0] Write session: aborting 2025-07-08T12:01:10.505399Z :INFO: [] MessageGroupId [src] SessionId [src|55ebc02e-5b5ba41a-24f439fa-fadb2cf_0] Write session: gracefully shut down, all writes complete 2025-07-08T12:01:10.505404Z :DEBUG: [] MessageGroupId [src] SessionId [src|55ebc02e-5b5ba41a-24f439fa-fadb2cf_0] Write session: destroy 2025-07-08T12:01:10.505904Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|55ebc02e-5b5ba41a-24f439fa-fadb2cf_0 grpc read done: success: 0 data: 2025-07-08T12:01:10.505912Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|55ebc02e-5b5ba41a-24f439fa-fadb2cf_0 grpc read failed 2025-07-08T12:01:10.505919Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|55ebc02e-5b5ba41a-24f439fa-fadb2cf_0 grpc closed 2025-07-08T12:01:10.505924Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|55ebc02e-5b5ba41a-24f439fa-fadb2cf_0 is DEAD 2025-07-08T12:01:10.506080Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-07-08T12:01:10.506538Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7524679924674062826:2461] destroyed 2025-07-08T12:01:10.506552Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. Session was created >>> Ready to answer: ok 2025-07-08T12:01:10.561126Z :ERROR: [/Root] OnFederationDiscovery: Got error. Status: UNAVAILABLE. Description: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::NullInKeySuffix [GOOD] Test command err: Trying to start YDB, gRPC: 10670, MsgBus: 63317 2025-07-08T12:01:12.081017Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679935765305105:2233];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:12.081589Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001158/r3tmp/tmpYDzrl9/pdisk_1.dat 2025-07-08T12:01:12.169518Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10670, node 1 2025-07-08T12:01:12.178555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:12.178581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:12.179941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:12.187158Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:12.187168Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:12.187170Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:12.187207Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63317 TClient is connected to server localhost:63317 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:12.317489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.325872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.361875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.393949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.406091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:12.638360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.649292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.660666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.676794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.698927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.713409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.730365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.884616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.936140Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976072980, txId: 281474976715672] shutting down Trying to start YDB, gRPC: 13202, MsgBus: 15703 2025-07-08T12:01:13.235782Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679939288871750:2163];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:13.235878Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001158/r3tmp/tmpDW6r7x/pdisk_1.dat 2025-07-08T12:01:13.254087Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13202, node 2 2025-07-08T12:01:13.267228Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:13.267241Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:13.267244Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:13.267288Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15703 TClient is connected to server localhost:15703 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:13.341260Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:13.341296Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:13.341637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.342724Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:13.347143Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:13.361846Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:13.397897Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:13.420701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:13.442344Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:13.653867Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.689090Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.706572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.729053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.759680Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.776894Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.791806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.950987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:14.005619Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976074051, txId: 281474976715672] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::BoolFlag [GOOD] Test command err: Trying to start YDB, gRPC: 27561, MsgBus: 15616 2025-07-08T12:01:10.833902Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679926094536745:2093];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:10.835472Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001181/r3tmp/tmp3pdGWU/pdisk_1.dat 2025-07-08T12:01:10.902679Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27561, node 1 2025-07-08T12:01:10.920916Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:10.920928Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:10.920930Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:10.920994Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15616 2025-07-08T12:01:10.931829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:10.931848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-07-08T12:01:10.933265Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15616 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T12:01:10.977900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.017785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:11.050221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:11.082350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:11.102197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:11.303668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.312576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.372443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.386185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.444173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.458979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.471163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.775212Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7524679930389506520:2446] TxId: 281474976715671. Ctx: { TraceId: 01jzmyjmh9a9q8py43jr5a8k0g, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWU5NTRjYS0yNmEyOGFhNC1jYjMxYTllOC1iNmMxM2Q2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Can not find default state storage group for database /Root 2025-07-08T12:01:11.836414Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:01:11.878879Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7524679930389506598:2446] TxId: 281474976715672. Ctx: { TraceId: 01jzmyjmh9a9q8py43jr5a8k0g, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWU5NTRjYS0yNmEyOGFhNC1jYjMxYTllOC1iNmMxM2Q2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Can not find default state storage group for database /Root 2025-07-08T12:01:11.879966Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976071818, txId: 281474976715670] shutting down Trying to start YDB, gRPC: 9770, MsgBus: 28555 2025-07-08T12:01:12.104935Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679935668049428:2060];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:12.104967Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001181/r3tmp/tmplETdyU/pdisk_1.dat 2025-07-08T12:01:12.126687Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9770, node 2 2025-07-08T12:01:12.145626Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:12.145648Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:12.145649Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:12.145684Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28555 TClient is connected to server localhost:28555 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:12.209692Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:12.209724Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:12.210016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.211578Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:12.241441Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.272191Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:12.316427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.331823Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.441243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.450324Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.472634Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.530128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.593221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.611030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.629039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.015383Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976072966, txId: 281474976715670] shutting down Trying to start YDB, gRPC: 9292, MsgBus: 17669 2025-07-08T12:01:13.247408Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7524679940111882053:2170];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:13.248095Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001181/r3tmp/tmpANYFgS/pdisk_1.dat TServer::EnableGrpc on GrpcPort 9292, node 3 2025-07-08T12:01:13.269236Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:13.273874Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:13.273885Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:13.273887Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:13.274041Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17669 TClient is connected to server localhost:17669 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T12:01:13.350067Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:13.350097Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:13.350428Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.351031Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:13.352453Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:13.400003Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:13.421240Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:13.452231Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:13.472527Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:13.686397Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.767042Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.776873Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.793225Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.802242Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.815728Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.832267Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.987223Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:14.074263Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976074114, txId: 281474976715674] shutting down 2025-07-08T12:01:14.246856Z node 3 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::TooManyComputeActors [GOOD] Test command err: Trying to start YDB, gRPC: 22970, MsgBus: 25930 2025-07-08T12:01:12.487727Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679935041867256:2062];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:12.487753Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00113f/r3tmp/tmph9HMmd/pdisk_1.dat 2025-07-08T12:01:12.545965Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22970, node 1 2025-07-08T12:01:12.565909Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:12.565920Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:12.565922Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:12.565957Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25930 TClient is connected to server localhost:25930 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:12.622027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:12.622049Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:12.622518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.623540Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:12.624444Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:12.632158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.696217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.734723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.796786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:12.871821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.879403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.890253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.897334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.911854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.926205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.940864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.113051Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7524679939336837014:2446] TxId: 281474976715671. Ctx: { TraceId: 01jzmyjnwh9skyxs2p1reg8mtz, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmUyYWNhOTEtN2JmZGI5MTktZjU1YjNiMTItNWUwMzA2NjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Can not find default state storage group for database /Root 2025-07-08T12:01:13.114209Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976073112, txId: 281474976715670] shutting down Trying to start YDB, gRPC: 20751, MsgBus: 62011 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00113f/r3tmp/tmpaKSfsK/pdisk_1.dat 2025-07-08T12:01:13.452565Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:13.452734Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 20751, node 2 2025-07-08T12:01:13.465481Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:13.465493Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:13.465495Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:13.465532Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62011 TClient is connected to server localhost:62011 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:13.548073Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:13.548095Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:13.548354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:13.549045Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:13.559013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:13.568158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:13.592274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-07-08T12:01:13.604372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.825762Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.837969Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.849650Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.857705Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.872974Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.890684Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.908767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:14.085706Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 waiting...
: Warning: Type annotation, code: 1030
:7:13: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:8:18: Warning: At function: AssumeColumnOrderPartial, At function: Aggregate, At function: Filter, At function: Coalesce
:9:67: Warning: At function: And
:9:39: Warning: At function: <
:9:46: Warning: At function: -
:9:46: Warning: Integral type implicit bitcast: Optional and Int32, code: 1107
: Warning: Execution, code: 1060
:4:44: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
: Error: Requested too many execution units: 22, code: 2029 2025-07-08T12:01:14.230437Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Mjc2ODI2YjItZDE1N2U2NTgtMzU1Njc4NDctOGVlMDMxZjc=, ActorId: [2:7524679942947468942:2468], ActorState: ExecuteState, TraceId: 01jzmyjpw629ythkqfqbgab5e6, Create QueryResponse for error on request, msg: 2025-07-08T12:01:14.230598Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976074275, txId: 281474976715672] shutting down >> KqpSplit::AfterResult+Descending >> KqpScan::StreamLookupFailedRead [GOOD] >> KqpScan::CrossJoinOneColumn [GOOD] >> KqpScan::UnionAggregate ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::GrepRange [GOOD] Test command err: Trying to start YDB, gRPC: 17055, MsgBus: 19318 2025-07-08T12:01:11.375043Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679932561914839:2065];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:11.375053Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001162/r3tmp/tmpvRPZg7/pdisk_1.dat 2025-07-08T12:01:11.454956Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17055, node 1 2025-07-08T12:01:11.485123Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:11.485136Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:11.485138Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:11.485183Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19318 TClient is connected to server localhost:19318 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-07-08T12:01:11.517334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:11.517360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:11.518461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-07-08T12:01:11.535692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:11.537911Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:11.565501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:11.601685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:11.678154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:11.693822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.850146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.859364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.870206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.883362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.899693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.913007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.927001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.111384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.174184Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976072217, txId: 281474976715672] shutting down Trying to start YDB, gRPC: 17679, MsgBus: 6440 2025-07-08T12:01:12.456545Z node 2 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679936087578273:2159];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:12.457394Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001162/r3tmp/tmpEfRCDg/pdisk_1.dat 2025-07-08T12:01:12.474999Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17679, node 2 2025-07-08T12:01:12.501229Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:12.501245Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:12.501247Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:12.501307Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6440 TClient is connected to server localhost:6440 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:12.555706Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:12.555734Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:12.556048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.557609Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:12.557861Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:12.563974Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:12.581443Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:12.646894Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.659689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.819328Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.833334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.843932Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.855840Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.869467Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.888750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.901662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.076713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.134598Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976073176, txId: 281474976715672] shutting down Trying to start YDB, gRPC: 3338, MsgBus: 31033 2025-07-08T12:01:13.500315Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7524679938125651032:2211];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:13.501433Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001162/r3tmp/tmpWvkPv1/pdisk_1.dat 2025-07-08T12:01:13.555526Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3338, node 3 2025-07-08T12:01:13.578187Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:13.578201Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:13.578204Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:13.578256Z node 3 :NET_CLASSIFIER ERROR: got bad 
distributable configuration TClient is connected to server localhost:31033 2025-07-08T12:01:13.617326Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:13.617355Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:13.619016Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31033 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:13.661552Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:13.665185Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:13.708908Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:13.730846Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.789753Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:13.811605Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:13.928102Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.940492Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.950893Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.962730Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.976366Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:14.034590Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:14.046519Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:14.289918Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976074331, txId: 281474976715670] shutting down >> IncrementalBackup::MultiRestore [GOOD] >> IncrementalBackup::E2EBackupCollection >> KqpScan::DqSourceLiteralRange [GOOD] >> KqpSplit::StreamLookupSplitAfterFirstResult >> KqpScan::PureExpr >> KqpScan::AggregateNoColumn >> KqpScan::SecondaryIndexCustomColumnOrder [GOOD] >> KqpScan::SelectExistsUnexpected ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::StreamLookupFailedRead [GOOD] Test command err: Trying to start YDB, gRPC: 7525, MsgBus: 24040 2025-07-08T12:01:12.493926Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679935718195427:2182];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:12.493991Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001145/r3tmp/tmpVmVH8q/pdisk_1.dat 2025-07-08T12:01:12.547923Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7525, node 1 2025-07-08T12:01:12.560065Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:12.560075Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:12.560077Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:12.560108Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24040 TClient is connected to server localhost:24040 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:12.621859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.624536Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-07-08T12:01:12.626192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:12.626208Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:12.627455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:12.627938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.646974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.668381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.676980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:12.882218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.890173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.904322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.918487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.932488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.947386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.965526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:13.148301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.183040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:13.261454Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7524679940013165476:2483] TxId: 281474976710675. Ctx: { TraceId: 01jzmyjnzx7zss5zkef3e1900d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGRmOThlYzQtY2U2Mjg4NWEtNzA0ZWFhMTgtYWQ2ZjNkNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Can not find default state storage group for database /Root 2025-07-08T12:01:13.265124Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976073302, txId: 281474976710674] shutting down 2025-07-08T12:01:13.917239Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001145/r3tmp/tmptH8pjo/pdisk_1.dat 2025-07-08T12:01:14.020677Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:01:14.035659Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:14.069423Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:14.069463Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:14.079973Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:14.173785Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:14.948879Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715658. Ctx: { TraceId: 01jzmyjqm3b1gbvcyevahcg66f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjcwODNmZjktMjE4OTIyMTUtMjY1ODExZTUtNTY0ODM3MDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:14.948976Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:148;event=channel_info;ch_size=8388608;ch_count=1;ch_limit=8388608;inputs=0;input_channels_count=0; 2025-07-08T12:01:14.949062Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:148;event=channel_info;ch_size=8388608;ch_count=2;ch_limit=8388608;inputs=1;input_channels_count=1; 2025-07-08T12:01:14.949092Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:148;event=channel_info;ch_size=8388608;ch_count=2;ch_limit=8388608;inputs=1;input_channels_count=1; 2025-07-08T12:01:14.949118Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:148;event=channel_info;ch_size=8388608;ch_count=2;ch_limit=8388608;inputs=1;input_channels_count=1; 2025-07-08T12:01:14.949168Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:3167:4348], TxId: 281474976715658, task: 1. Ctx: { TraceId : 01jzmyjqm3b1gbvcyevahcg66f. SessionId : ydb://session/3?node_id=2&id=MjcwODNmZjktMjE4OTIyMTUtMjY1ODExZTUtNTY0ODM3MDE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : . }. Start compute actor [2:3167:4348], task: 1 2025-07-08T12:01:14.949179Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:3167:4348], TxId: 281474976715658, task: 1. Ctx: { TraceId : 01jzmyjqm3b1gbvcyevahcg66f. SessionId : ydb://session/3?node_id=2&id=MjcwODNmZjktMjE4OTIyMTUtMjY1ODExZTUtNTY0ODM3MDE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : . }. Set execution timeout 299.999900s 2025-07-08T12:01:14.949374Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:3168:4349], TxId: 281474976715658, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=MjcwODNmZjktMjE4OTIyMTUtMjY1ODExZTUtNTY0ODM3MDE=. CustomerSuppliedId : . 
TraceId : 01jzmyjqm3b1gbvcyevahcg66f. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : . }. Start compute actor [2:3168:4349], task: 2 2025-07-08T12:01:14.949386Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:3168:4349], TxId: 281474976715658, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=MjcwODNmZjktMjE4OTIyMTUtMjY1ODExZTUtNTY0ODM3MDE=. CustomerSuppliedId : . TraceId : 01jzmyjqm3b1gbvcyevahcg66f. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : . }. Set execution timeout 299.999900s 2025-07-08T12:01:14.951638Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:3168:4349], TxId: 281474976715658, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=MjcwODNmZjktMjE4OTIyMTUtMjY1ODExZTUtNTY0ODM3MDE=. CustomerSuppliedId : . TraceId : 01jzmyjqm3b1gbvcyevahcg66f. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : . }. Create transform for input 0 Channels { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 3167 RawX2: 8589938940 } } DstEndpoint { } InMemory: true } UnionAll { } Transform { Type: "StreamLookupInputTransformer" InputType: "\037\002\006Key\213\004\207\205\002\203\004\002\205\004?\000?\000\002\024Value/" OutputType: "\037\004\006Key\nValue\213\004\205\004\203\004?\000\002\006\207\205\004\207?\000?\004\002\006/" Settings { [type.googleapis.com/NKikimrKqp.TKqpStreamLookupSettings] { Table { Path: "/Root/Table1" OwnerId: 72057594046644480 TableId: 2 Version: 1 } KeyColumns { Name: "Key" Id: 1 TypeId: 2 } Columns { Name: "Key" Id: 1 TypeId: 2 } Columns { Name: "Value" Id: 2 TypeId: 2 } Snapshot { Step: 1000 TxId: 18446744073709551615 } LookupKeyColumns: "Key" LookupStrategy: JOIN KeepRowsOrder: false AllowNullKeysPrefixSize: 0 LockMode: OPTIMISTIC } } } 2025-07-08T12:01:14.951767Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:3169:4350], TxId: 281474976715658, task: 3. Ctx: { SessionId : ydb://session/3?node_id=2&id ... : 01jzmyjqm3b1gbvcyevahcg66f. CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : . }. CA StateFunc 271646923 2025-07-08T12:01:14.958464Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:3168:4349], TxId: 281474976715658, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=MjcwODNmZjktMjE4OTIyMTUtMjY1ODExZTUtNTY0ODM3MDE=. CustomerSuppliedId : . TraceId : 01jzmyjqm3b1gbvcyevahcg66f. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : . }. CA StateFunc 276037645 2025-07-08T12:01:14.958471Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:3169:4350], TxId: 281474976715658, task: 3. Ctx: { SessionId : ydb://session/3?node_id=2&id=MjcwODNmZjktMjE4OTIyMTUtMjY1ODExZTUtNTY0ODM3MDE=. CustomerSuppliedId : . TraceId : 01jzmyjqm3b1gbvcyevahcg66f. CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : . }. CA StateFunc 271646922 2025-07-08T12:01:14.958514Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:3168:4349], TxId: 281474976715658, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=MjcwODNmZjktMjE4OTIyMTUtMjY1ODExZTUtNTY0ODM3MDE=. CustomerSuppliedId : . TraceId : 01jzmyjqm3b1gbvcyevahcg66f. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : . }. CA StateFunc 271646922 2025-07-08T12:01:14.958520Z node 2 :KQP_COMPUTE DEBUG: StreamLookupActor, inputIndex: 0, CA Id [2:3168:4349]Returned 0 bytes, 0 rows, finished: 0 2025-07-08T12:01:14.958531Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:3168:4349], TxId: 281474976715658, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=MjcwODNmZjktMjE4OTIyMTUtMjY1ODExZTUtNTY0ODM3MDE=. CustomerSuppliedId : . 
TraceId : 01jzmyjqm3b1gbvcyevahcg66f. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : . }. CA StateFunc 271646927 2025-07-08T12:01:14.958552Z node 2 :KQP_COMPUTE DEBUG: StreamLookupActor, inputIndex: 0, CA Id [2:3168:4349]Recv TEvReadResult (stream lookup) from ShardID=72075186224037888, Table = /Root/Table1, ReadId=3 (current ReadId=3), SeqNo=1, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2025-07-08T12:01:14.958561Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:3168:4349], TxId: 281474976715658, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=MjcwODNmZjktMjE4OTIyMTUtMjY1ODExZTUtNTY0ODM3MDE=. CustomerSuppliedId : . TraceId : 01jzmyjqm3b1gbvcyevahcg66f. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : . }. CA StateFunc 271646927 2025-07-08T12:01:14.958568Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:3168:4349], TxId: 281474976715658, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=MjcwODNmZjktMjE4OTIyMTUtMjY1ODExZTUtNTY0ODM3MDE=. CustomerSuppliedId : . TraceId : 01jzmyjqm3b1gbvcyevahcg66f. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : . }. CA StateFunc 271646922 2025-07-08T12:01:14.958580Z node 2 :KQP_COMPUTE DEBUG: StreamLookupActor, inputIndex: 0, CA Id [2:3168:4349]Returned 10 bytes, 1 rows, finished: 1 2025-07-08T12:01:14.958603Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:3168:4349], TxId: 281474976715658, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=MjcwODNmZjktMjE4OTIyMTUtMjY1ODExZTUtNTY0ODM3MDE=. CustomerSuppliedId : . TraceId : 01jzmyjqm3b1gbvcyevahcg66f. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-07-08T12:01:14.958609Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:3169:4350], TxId: 281474976715658, task: 3. Ctx: { SessionId : ydb://session/3?node_id=2&id=MjcwODNmZjktMjE4OTIyMTUtMjY1ODExZTUtNTY0ODM3MDE=. CustomerSuppliedId : . TraceId : 01jzmyjqm3b1gbvcyevahcg66f. CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : . }. CA StateFunc 271646923 2025-07-08T12:01:14.958617Z node 2 :KQP_COMPUTE DEBUG: TxId: 281474976715658, task: 3. Finish input channelId: 2, from: [2:3168:4349] 2025-07-08T12:01:14.958629Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:3168:4349], TxId: 281474976715658, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=MjcwODNmZjktMjE4OTIyMTUtMjY1ODExZTUtNTY0ODM3MDE=. CustomerSuppliedId : . TraceId : 01jzmyjqm3b1gbvcyevahcg66f. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : . }. CA StateFunc 276037645 2025-07-08T12:01:14.958633Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:3169:4350], TxId: 281474976715658, task: 3. Ctx: { SessionId : ydb://session/3?node_id=2&id=MjcwODNmZjktMjE4OTIyMTUtMjY1ODExZTUtNTY0ODM3MDE=. CustomerSuppliedId : . TraceId : 01jzmyjqm3b1gbvcyevahcg66f. CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : . }. CA StateFunc 271646922 2025-07-08T12:01:14.958654Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:3169:4350], TxId: 281474976715658, task: 3. Ctx: { SessionId : ydb://session/3?node_id=2&id=MjcwODNmZjktMjE4OTIyMTUtMjY1ODExZTUtNTY0ODM3MDE=. CustomerSuppliedId : . TraceId : 01jzmyjqm3b1gbvcyevahcg66f. CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-07-08T12:01:14.958661Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:3170:4351], TxId: 281474976715658, task: 4. 
Ctx: { TraceId : 01jzmyjqm3b1gbvcyevahcg66f. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjcwODNmZjktMjE4OTIyMTUtMjY1ODExZTUtNTY0ODM3MDE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : . }. CA StateFunc 271646923 2025-07-08T12:01:14.958666Z node 2 :KQP_COMPUTE DEBUG: TxId: 281474976715658, task: 4. Finish input channelId: 3, from: [2:3169:4350] 2025-07-08T12:01:14.958677Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:3168:4349], TxId: 281474976715658, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=MjcwODNmZjktMjE4OTIyMTUtMjY1ODExZTUtNTY0ODM3MDE=. CustomerSuppliedId : . TraceId : 01jzmyjqm3b1gbvcyevahcg66f. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : . }. CA StateFunc 271646922 2025-07-08T12:01:14.958685Z node 2 :KQP_COMPUTE DEBUG: TxId: 281474976715658, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-07-08T12:01:14.958691Z node 2 :KQP_COMPUTE DEBUG: TxId: 281474976715658, task: 2. Tasks execution finished, waiting for chunk delivery in output channelId: 2, seqNo: [2] 2025-07-08T12:01:14.958696Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:3169:4350], TxId: 281474976715658, task: 3. Ctx: { SessionId : ydb://session/3?node_id=2&id=MjcwODNmZjktMjE4OTIyMTUtMjY1ODExZTUtNTY0ODM3MDE=. CustomerSuppliedId : . TraceId : 01jzmyjqm3b1gbvcyevahcg66f. CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : . }. CA StateFunc 271646927 2025-07-08T12:01:14.958701Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:3170:4351], TxId: 281474976715658, task: 4. Ctx: { TraceId : 01jzmyjqm3b1gbvcyevahcg66f. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjcwODNmZjktMjE4OTIyMTUtMjY1ODExZTUtNTY0ODM3MDE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : . }. CA StateFunc 271646922 2025-07-08T12:01:14.958717Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:3170:4351], TxId: 281474976715658, task: 4. Ctx: { TraceId : 01jzmyjqm3b1gbvcyevahcg66f. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjcwODNmZjktMjE4OTIyMTUtMjY1ODExZTUtNTY0ODM3MDE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-07-08T12:01:14.958739Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:3168:4349], TxId: 281474976715658, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=MjcwODNmZjktMjE4OTIyMTUtMjY1ODExZTUtNTY0ODM3MDE=. CustomerSuppliedId : . TraceId : 01jzmyjqm3b1gbvcyevahcg66f. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : . }. CA StateFunc 271646927 2025-07-08T12:01:14.958745Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:3169:4350], TxId: 281474976715658, task: 3. Ctx: { SessionId : ydb://session/3?node_id=2&id=MjcwODNmZjktMjE4OTIyMTUtMjY1ODExZTUtNTY0ODM3MDE=. CustomerSuppliedId : . TraceId : 01jzmyjqm3b1gbvcyevahcg66f. CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : . }. CA StateFunc 271646922 2025-07-08T12:01:14.958750Z node 2 :KQP_COMPUTE DEBUG: TxId: 281474976715658, task: 3. Tasks execution finished, don't wait for ack delivery in input channelId: 2, seqNo: [2] 2025-07-08T12:01:14.958754Z node 2 :KQP_COMPUTE DEBUG: TxId: 281474976715658, task: 3. Tasks execution finished 2025-07-08T12:01:14.958758Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:3169:4350], TxId: 281474976715658, task: 3. Ctx: { SessionId : ydb://session/3?node_id=2&id=MjcwODNmZjktMjE4OTIyMTUtMjY1ODExZTUtNTY0ODM3MDE=. CustomerSuppliedId : . 
TraceId : 01jzmyjqm3b1gbvcyevahcg66f. CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : . }. Compute state finished. All channels and sinks finished 2025-07-08T12:01:14.958782Z node 2 :KQP_COMPUTE DEBUG: TxId: 281474976715658, task: 3. pass away 2025-07-08T12:01:14.958812Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715658;task_id=3;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-07-08T12:01:14.958871Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:3168:4349], TxId: 281474976715658, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=MjcwODNmZjktMjE4OTIyMTUtMjY1ODExZTUtNTY0ODM3MDE=. CustomerSuppliedId : . TraceId : 01jzmyjqm3b1gbvcyevahcg66f. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : . }. CA StateFunc 271646922 2025-07-08T12:01:14.958879Z node 2 :KQP_COMPUTE DEBUG: TxId: 281474976715658, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-07-08T12:01:14.958883Z node 2 :KQP_COMPUTE DEBUG: TxId: 281474976715658, task: 2. Tasks execution finished 2025-07-08T12:01:14.958887Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:3168:4349], TxId: 281474976715658, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=MjcwODNmZjktMjE4OTIyMTUtMjY1ODExZTUtNTY0ODM3MDE=. CustomerSuppliedId : . TraceId : 01jzmyjqm3b1gbvcyevahcg66f. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : . }. Compute state finished. All channels and sinks finished 2025-07-08T12:01:14.958915Z node 2 :KQP_COMPUTE DEBUG: TxId: 281474976715658, task: 2. pass away 2025-07-08T12:01:14.958941Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715658;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-07-08T12:01:14.958968Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:3170:4351], TxId: 281474976715658, task: 4. Ctx: { TraceId : 01jzmyjqm3b1gbvcyevahcg66f. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjcwODNmZjktMjE4OTIyMTUtMjY1ODExZTUtNTY0ODM3MDE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : . }. CA StateFunc 271646922 2025-07-08T12:01:14.958974Z node 2 :KQP_COMPUTE DEBUG: TxId: 281474976715658, task: 4. Tasks execution finished, don't wait for ack delivery in input channelId: 3, seqNo: [1] 2025-07-08T12:01:14.958978Z node 2 :KQP_COMPUTE DEBUG: TxId: 281474976715658, task: 4. Tasks execution finished 2025-07-08T12:01:14.958981Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:3170:4351], TxId: 281474976715658, task: 4. Ctx: { TraceId : 01jzmyjqm3b1gbvcyevahcg66f. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MjcwODNmZjktMjE4OTIyMTUtMjY1ODExZTUtNTY0ODM3MDE=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : . }. Compute state finished. All channels and sinks finished 2025-07-08T12:01:14.958991Z node 2 :KQP_COMPUTE DEBUG: TxId: 281474976715658, task: 4. pass away 2025-07-08T12:01:14.958999Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715658;task_id=4;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; >> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental [GOOD] >> KqpScan::LeftSemiJoinSimple >> KqpScan::Limit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::CrossJoinOneColumn [GOOD] Test command err: Trying to start YDB, gRPC: 20631, MsgBus: 63493 2025-07-08T12:01:11.080005Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679931297178831:2150];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:11.132312Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00116a/r3tmp/tmpLuKkaE/pdisk_1.dat 2025-07-08T12:01:11.173429Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20631, node 1 2025-07-08T12:01:11.194499Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:11.194508Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:11.194510Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:11.194558Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63493 2025-07-08T12:01:11.237268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:11.237298Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:11.241396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63493 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:11.278217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:11.281356Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-07-08T12:01:11.332038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:11.410304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:11.470031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:11.483096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:11.561331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.574987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.583048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.597199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.614326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.633290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.648158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.834560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.921652Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7524679931297181339:2461] TxId: 281474976710673. Ctx: { TraceId: 01jzmyjmp8ccb5n2neytpzk82r, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjA3ODViMjMtZTJiYjc4MjYtZWI5ZjU3YjAtY2NjZDU3YTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Can not find default state storage group for database /Root 2025-07-08T12:01:12.003901Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976071965, txId: 281474976710672] shutting down 2025-07-08T12:01:12.046091Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7524679935592148722:2473] TxId: 281474976710675. Ctx: { TraceId: 01jzmyjmt53jce1qjvf7ng4j07, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjE2YTE3OTQtOWFmYTk4ZWItNTU4NDg3OWUtMzQzNDc3ODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Can not find default state storage group for database /Root 2025-07-08T12:01:12.095894Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:01:12.144755Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976072091, txId: 281474976710674] shutting down Trying to start YDB, gRPC: 24217, MsgBus: 2038 2025-07-08T12:01:12.416668Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679933409014425:2064];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:12.417051Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00116a/r3tmp/tmp4kXN65/pdisk_1.dat 2025-07-08T12:01:12.436402Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24217, node 2 2025-07-08T12:01:12.447392Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:12.447406Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:12.447408Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:12.447459Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2038 TClient is connected to server localhost:2038 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:12.525519Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:12.525550Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:12.525846Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:12.529339Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:12.529516Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:12.565775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.584435Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.606103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.617565Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.807494Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.816259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.829590Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.841694Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.856617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.869458Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.883942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.149437Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976073190, txId: 281474976715670] shutting down Trying to start YDB, gRPC: 29014, MsgBus: 31353 2025-07-08T12:01:13.395067Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7524679938894877546:2061];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:13.395098Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/43nv/00116a/r3tmp/tmpLPdNyn/pdisk_1.dat TServer::EnableGrpc on GrpcPort 29014, node 3 2025-07-08T12:01:13.438591Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:13.440395Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:13.440403Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:13.440406Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:13.440455Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31353 TClient is connected to server localhost:31353 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:13.498695Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:13.498728Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:13.499106Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:13.499822Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:13.504068Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.515582Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:13.535991Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:13.547682Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:13.784940Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.797396Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.809362Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.866354Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.878317Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.891803Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.908299Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:14.109577Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:14.413567Z node 3 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:01:14.450533Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976074338, txId: 281474976715672] shutting down 2025-07-08T12:01:14.893960Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976074569, txId: 281474976715675] shutting down >> KikimrProvider::TestFillAuthPropertiesBasic [GOOD] >> KikimrProvider::TestFillAuthPropertiesAws [GOOD] >> KikimrProvider::AlterTableAddIndexWithTableSettings [GOOD] >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::DqSourceLiteralRange [GOOD] Test command err: Trying to start YDB, gRPC: 12273, MsgBus: 62387 2025-07-08T12:01:11.705250Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679931247329814:2145];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:11.705330Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00115b/r3tmp/tmpQbN2j0/pdisk_1.dat 2025-07-08T12:01:11.769066Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12273, node 1 2025-07-08T12:01:11.782004Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:11.782016Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:11.782018Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:11.782054Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62387 2025-07-08T12:01:11.804613Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:11.804647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:11.805770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62387 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-07-08T12:01:11.846698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:11.851095Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:11.861955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:11.926975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:11.954377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:11.973665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.091464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.153040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.168016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.201209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.212929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.225826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.245953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:12.559257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.609090Z node 1 :RPC_REQUEST WARN: Client lost Trying to start YDB, gRPC: 3123, MsgBus: 19439 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00115b/r3tmp/tmpKBdG0f/pdisk_1.dat 2025-07-08T12:01:12.962658Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:12.966187Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 3123, node 2 2025-07-08T12:01:12.980475Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:12.980487Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:12.980490Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:12.980540Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19439 TClient is connected to server localhost:19439 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:13.053139Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:13.053166Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:13.053522Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.054001Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:13.062271Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:13.081677Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:13.098238Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:13.125878Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:13.140615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:13.456131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.465959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.524227Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.540005Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.556280Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.612750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.675535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.892832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:13.952990Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:01:13.985859Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976074030, txId: 281474976715672] shutting down Trying to start YDB, gRPC: 12016, MsgBus: 22393 2025-07-08T12:01:14.354922Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00115b/r3tmp/tmpfGS1K4/pdisk_1.dat 2025-07-08T12:01:14.369937Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12016, node 3 2025-07-08T12:01:14.390156Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:14.390169Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:14.390171Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:14.390224Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22393 2025-07-08T12:01:14.452676Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:14.452707Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:14.453788Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22393 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:14.465645Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:14.473384Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:14.479251Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:14.494131Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:14.527563Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:14.539621Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:14.708997Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:14.717133Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:14.727572Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:14.739985Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:14.754418Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:14.767055Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:14.781492Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:14.990389Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:15.113437Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976075143, txId: 281474976715672] shutting down 2025-07-08T12:01:15.152238Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976075192, txId: 281474976715674] shutting down >> KqpSplit::AfterResult+Descending [GOOD] >> KqpSplit::AfterResult+Unspecified >> KqpScan::PureExpr [GOOD] >> KqpScan::RestrictSqlV0 >> KikimrIcGateway::TestLoadTableMetadata ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/topic/ut/unittest >> TxUsage::TestRetentionOnLongTxAndBigMessages [GOOD] Test command err: 2025-07-08T11:58:16.789003Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679177779896530:2226];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:16.793726Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T11:58:16.825262Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b3c/r3tmp/tmpH04GID/pdisk_1.dat 2025-07-08T11:58:16.862620Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24727, node 1 2025-07-08T11:58:16.875668Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/43nv/001b3c/r3tmp/yandexRHCQm4.tmp 2025-07-08T11:58:16.875685Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/43nv/001b3c/r3tmp/yandexRHCQm4.tmp 2025-07-08T11:58:16.875747Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/43nv/001b3c/r3tmp/yandexRHCQm4.tmp 2025-07-08T11:58:16.875802Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T11:58:16.879555Z INFO: TTestServer started on Port 29902 GrpcPort 24727 TClient is connected to server localhost:29902 2025-07-08T11:58:16.927109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T11:58:16.927133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T11:58:16.928184Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected PQClient connected to localhost:24727 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T11:58:16.967273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:16.973669Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-07-08T11:58:17.085617Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-07-08T11:58:17.086810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-07-08T11:58:17.224473Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7524679182074864372:2291], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T11:58:17.224858Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmE2ZjM0NmYtYTIwYjVkM2MtYTIxODJhOS0xOTllYWRiOQ==, ActorId: [1:7524679182074864370:2290], ActorState: ExecuteState, TraceId: 01jzmyda2n3g6r8pzxxg39zvnb, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T11:58:17.225278Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T11:58:17.230166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T11:58:17.241276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-07-08T11:58:17.273694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7524679182074864655:2560] 2025-07-08T11:58:17.773129Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T11:58:21.769462Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7524679177779896530:2226];send_to=[0:7307199536658146131:7762515]; 2025-07-08T11:58:21.769499Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-07-08T11:58:22.502747Z :TwoSessionOneConsumer INFO: TTopicSdkTestSetup started 2025-07-08T11:58:22.517630Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-07-08T11:58:22.524810Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-07-08T11:58:22.524925Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7524679203549701439:2732] connected; active server actors: 1 2025-07-08T11:58:22.524996Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2025-07-08T11:58:22.525213Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-07-08T11:58:22.525238Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2025-07-08T11:58:22.525302Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-07-08T11:58:22.525339Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-07-08T11:58:22.525345Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-07-08T11:58:22.525351Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2025-07-08T11:58:22.525600Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7524679203549701438:2731], now have 1 active actors on pipe 2025-07-08T11:58:22.525671Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-07-08T11:58:22.525777Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7524679203549701480:2399], now have 1 active actors on pipe 2025-07-08T11:58:22.525825Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037892, NodeId 1, Generation 1 2025-07-08T11:58:22.525827Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] TEvClientConnected TabletId 72057594046644480, NodeId 1, Generation 2 2025-07-08T11:58:22.525968Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-07-08T11:58:22.526439Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [1:7524679203549701491:2760], now have 1 active actors on pipe 2025-07-08T11:58:22.526472Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-07-08T11:58:22.526703Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-07-08T11:58:22.526843Z node 1 :PERSQUEUE DEBUG: [test-topic:0:Initializer] Start initializing step TInitConfigStep 2025-07-08T11:58:22.526856Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateInit] HandleOnInit TEvPQ::TEvProposePartitionConfig 2025-07-08T11:58:22.526905Z node 1 :PERSQUEUE DEBUG: [test-topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-07-08T11:58:22.526981Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] bootstrapping 0 [1:7524679203549701497:2403] 2025-07-08T11:58:22.527205Z node 1 :PERSQUEUE DEBUG: [test-topic:0:Initializer] Initializing completed. 2025-07-08T11:58:22.527209Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] init complete for topic 'test-topic' partition 0 generation 1 [1:7524679203549701497:2403] 2025-07-08T11:58:22.527216Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateInit] SYNC INIT topic test-topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-07-08T11:58:22.527320Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Process pending events. 
Count 1 2025-07-08T11:58:22.527352Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'test-topic' partition 0 user test-consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-07-08T11:58:22.527405Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-07-08T11:58:22.527800Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-07-08T11:58:22.530227Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1751975902572, TxId 281474976710671 2025-07-08T11:58:22.530305Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-07-08T11:58:22.534424Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-07-08T11:58:22.534547Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Apply new config PartitionConfig { MaxCountInPartition: 2 ... questId: cookie: 5 2025-07-08T12:01:12.921092Z node 13 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_13_1_9936142128828683555_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 5 } 2025-07-08T12:01:12.921099Z node 13 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_13_1_9936142128828683555_v1 TopicId: Topic /Root/topic_A in database: Root, partition 0(assignId:1) commit done to position 8 endOffset 8 with cookie 5 2025-07-08T12:01:12.921111Z node 13 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_13_1_9936142128828683555_v1 replying for commits: assignId# 1, from# 5, to# 5, offset# 8 2025-07-08T12:01:12.921313Z :DEBUG: [/Root] [/Root] [5b74ced6-d92ff425-1cc6d552-24496974] [] Committed response: { partitions_committed_offsets { partition_session_id: 1 committed_offset: 8 } } 2025-07-08T12:01:13.654168Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:7:8 2025-07-08T12:01:13.654195Z :INFO: [/Root] [/Root] [5b74ced6-d92ff425-1cc6d552-24496974] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1000 BytesRead: 62914628 MessagesRead: 8 BytesReadCompressed: 62914628 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-07-08T12:01:13.654542Z node 13 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_13_1_9936142128828683555_v1 checking auth because of timeout 2025-07-08T12:01:13.654592Z node 13 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_13_1_9936142128828683555_v1 auth for : test-consumer 2025-07-08T12:01:13.654831Z node 13 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_13_1_9936142128828683555_v1 Handle describe topics response 2025-07-08T12:01:13.654855Z node 13 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_13_1_9936142128828683555_v1 auth is DEAD 2025-07-08T12:01:13.654877Z node 13 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_13_1_9936142128828683555_v1 auth ok: topics# 1, initDone# 1 2025-07-08T12:01:13.724296Z node 13 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-07-08T12:01:13.724316Z node 13 :PERSQUEUE DEBUG: CacheProxy. Delete blobs from d0000000000_00000000000000000006_00011_0000000001_00015(+) to d0000000000_00000000000000000006_00011_0000000001_00015(+) 2025-07-08T12:01:13.724319Z node 13 :PERSQUEUE DEBUG: CacheProxy. Delete blobs from d0000000000_00000000000000000004_00016_0000000002_00015(+) to d0000000000_00000000000000000004_00016_0000000002_00015(+) 2025-07-08T12:01:13.724322Z node 13 :PERSQUEUE DEBUG: CacheProxy. Delete blobs from d0000000000_00000000000000000004_00000_0000000000_00016(+) to d0000000000_00000000000000000004_00000_0000000000_00016(+) 2025-07-08T12:01:13.724324Z node 13 :PERSQUEUE DEBUG: CacheProxy. Delete blobs from d0000000000_00000000000000000003_00006_0000000001_00014(+) to d0000000000_00000000000000000003_00006_0000000001_00014(+) 2025-07-08T12:01:13.724327Z node 13 :PERSQUEUE DEBUG: CacheProxy. Delete blobs from d0000000000_00000000000000000002_00011_0000000001_00015(+) to d0000000000_00000000000000000002_00011_0000000001_00015(+) 2025-07-08T12:01:13.724330Z node 13 :PERSQUEUE DEBUG: CacheProxy. Delete blobs from d0000000000_00000000000000000000_00016_0000000002_00015(+) to d0000000000_00000000000000000000_00016_0000000002_00015(+) 2025-07-08T12:01:13.724332Z node 13 :PERSQUEUE DEBUG: CacheProxy. Delete blobs from d0000000000_00000000000000000000_00000_0000000000_00016(+) to d0000000000_00000000000000000000_00000_0000000000_00016(+) 2025-07-08T12:01:13.724922Z node 13 :PERSQUEUE DEBUG: Deleting head blob in L1. Partition 0 offset 6 count 1 actorID [13:7524679898205927806:2429] 2025-07-08T12:01:13.724932Z node 13 :PERSQUEUE DEBUG: Deleting head blob in L1. Partition 0 offset 4 count 2 actorID [13:7524679898205927806:2429] 2025-07-08T12:01:13.724938Z node 13 :PERSQUEUE DEBUG: Deleting head blob in L1. Partition 0 offset 4 count 0 actorID [13:7524679898205927806:2429] 2025-07-08T12:01:13.726263Z node 13 :PERSQUEUE DEBUG: PQ Cache (L2). Removed. Tablet '72075186224037894' partition 0 offset 6 partno 11 count 1 parts 15 2025-07-08T12:01:13.727252Z node 13 :PERSQUEUE DEBUG: PQ Cache (L2). Removed. Tablet '72075186224037894' partition 0 offset 4 partno 16 count 2 parts 15 2025-07-08T12:01:13.728021Z node 13 :PERSQUEUE DEBUG: PQ Cache (L2). Removed. Tablet '72075186224037894' partition 0 offset 4 partno 0 count 0 parts 16 2025-07-08T12:01:13.733040Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-07-08T12:01:14.704754Z :INFO: [/Root] [/Root] [5b74ced6-d92ff425-1cc6d552-24496974] Closing read session. Close timeout: 0.000000s 2025-07-08T12:01:14.704778Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:7:8 2025-07-08T12:01:14.704802Z :INFO: [/Root] [/Root] [5b74ced6-d92ff425-1cc6d552-24496974] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2050 BytesRead: 62914628 MessagesRead: 8 BytesReadCompressed: 62914628 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-07-08T12:01:14.704828Z :NOTICE: [/Root] [/Root] [5b74ced6-d92ff425-1cc6d552-24496974] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-07-08T12:01:14.704838Z :DEBUG: [/Root] [/Root] [5b74ced6-d92ff425-1cc6d552-24496974] [] Abort session to cluster 2025-07-08T12:01:14.705097Z :NOTICE: [/Root] [/Root] [5b74ced6-d92ff425-1cc6d552-24496974] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-07-08T12:01:14.709323Z :INFO: [/Root] TraceId [] SessionId [grp-0|7043b4fc-70fc6047-2175c26-c788f332_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2025-07-08T12:01:14.709337Z :INFO: [/Root] TraceId [] SessionId [grp-0|7043b4fc-70fc6047-2175c26-c788f332_0] PartitionId [0] Generation [1] Write session will now close 2025-07-08T12:01:14.709346Z :DEBUG: [/Root] TraceId [] SessionId [grp-0|7043b4fc-70fc6047-2175c26-c788f332_0] PartitionId [0] Generation [1] Write session: aborting 2025-07-08T12:01:14.709494Z :INFO: [/Root] TraceId [] SessionId [grp-0|7043b4fc-70fc6047-2175c26-c788f332_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-07-08T12:01:14.709500Z :DEBUG: [/Root] TraceId [] SessionId [grp-0|7043b4fc-70fc6047-2175c26-c788f332_0] PartitionId [0] Generation [1] Write session: destroy 2025-07-08T12:01:14.709784Z node 13 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_13_1_9936142128828683555_v1 grpc read done: success# 0, data# { } 2025-07-08T12:01:14.709798Z node 13 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_13_1_9936142128828683555_v1 grpc read failed 2025-07-08T12:01:14.709806Z node 13 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_13_1_9936142128828683555_v1 grpc closed 2025-07-08T12:01:14.709826Z node 13 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_13_1_9936142128828683555_v1 is DEAD 2025-07-08T12:01:14.710013Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session test-consumer_13_1_9936142128828683555_v1 2025-07-08T12:01:14.710020Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [13:7524679936860634156:2593] destroyed 2025-07-08T12:01:14.710033Z node 13 :PERSQUEUE_READ_BALANCER INFO: [72075186224037895][topic_A] pipe [13:7524679936860634153:2590] disconnected; active server actors: 1 2025-07-08T12:01:14.710037Z node 13 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037895][topic_A] pipe [13:7524679936860634153:2590] client test-consumer disconnected session test-consumer_13_1_9936142128828683555_v1 2025-07-08T12:01:14.710074Z node 13 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_13_1_9936142128828683555_v1 2025-07-08T12:01:14.711397Z node 13 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: grp-0|7043b4fc-70fc6047-2175c26-c788f332_0 grpc read done: success: 0 data: 2025-07-08T12:01:14.711409Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: grp-0|7043b4fc-70fc6047-2175c26-c788f332_0 grpc read failed 2025-07-08T12:01:14.711416Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: grp-0|7043b4fc-70fc6047-2175c26-c788f332_0 grpc closed 2025-07-08T12:01:14.711419Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: grp-0|7043b4fc-70fc6047-2175c26-c788f332_0 is DEAD 2025-07-08T12:01:14.711583Z node 13 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-07-08T12:01:14.711593Z node 13 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-07-08T12:01:14.711843Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [13:7524679898205927907:2451] destroyed 2025-07-08T12:01:14.711850Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe 
[13:7524679898205927924:2451] destroyed 2025-07-08T12:01:14.711860Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-07-08T12:01:14.713201Z :INFO: [/Root] TraceId [] SessionId [grp-1|8e0267e1-6d1c3465-178f58ed-b5df3b3e_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2025-07-08T12:01:14.713210Z :INFO: [/Root] TraceId [] SessionId [grp-1|8e0267e1-6d1c3465-178f58ed-b5df3b3e_0] PartitionId [0] Generation [1] Write session will now close 2025-07-08T12:01:14.713215Z :DEBUG: [/Root] TraceId [] SessionId [grp-1|8e0267e1-6d1c3465-178f58ed-b5df3b3e_0] PartitionId [0] Generation [1] Write session: aborting 2025-07-08T12:01:14.713311Z :INFO: [/Root] TraceId [] SessionId [grp-1|8e0267e1-6d1c3465-178f58ed-b5df3b3e_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-07-08T12:01:14.713315Z :DEBUG: [/Root] TraceId [] SessionId [grp-1|8e0267e1-6d1c3465-178f58ed-b5df3b3e_0] PartitionId [0] Generation [1] Write session: destroy 2025-07-08T12:01:14.716870Z node 13 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 6 sessionId: grp-1|8e0267e1-6d1c3465-178f58ed-b5df3b3e_0 grpc read done: success: 0 data: 2025-07-08T12:01:14.716880Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 6 sessionId: grp-1|8e0267e1-6d1c3465-178f58ed-b5df3b3e_0 grpc read failed 2025-07-08T12:01:14.716887Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 6 sessionId: grp-1|8e0267e1-6d1c3465-178f58ed-b5df3b3e_0 grpc closed 2025-07-08T12:01:14.716890Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 6 sessionId: grp-1|8e0267e1-6d1c3465-178f58ed-b5df3b3e_0 is DEAD 2025-07-08T12:01:14.717104Z node 13 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-07-08T12:01:14.717114Z node 13 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-07-08T12:01:14.717161Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [13:7524679898205927934:2459] destroyed 2025-07-08T12:01:14.717166Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [13:7524679898205927951:2459] destroyed 2025-07-08T12:01:14.717177Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037894, Partition: 0, State: StateIdle] TPartition::DropOwner. 
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental [GOOD] Test command err: 2025-07-08T12:01:10.063528Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000bb6/r3tmp/tmpTlJF25/pdisk_1.dat 2025-07-08T12:01:10.209468Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:548:2472], Recipient [1:361:2356]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:01:10.209484Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:01:10.209488Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-07-08T12:01:10.209503Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:545:2470], Recipient [1:361:2356]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-07-08T12:01:10.209507Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-07-08T12:01:10.226982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-07-08T12:01:10.227059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:01:10.227119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-07-08T12:01:10.227166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-07-08T12:01:10.227187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:01:10.227203Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-07-08T12:01:10.227437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-07-08T12:01:10.227465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-07-08T12:01:10.227472Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-07-08T12:01:10.227477Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-07-08T12:01:10.227516Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:361:2356], Recipient [1:361:2356]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-07-08T12:01:10.227523Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-07-08T12:01:10.227540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:01:10.227549Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-07-08T12:01:10.227554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:01:10.227559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:01:10.227576Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-07-08T12:01:10.227643Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-07-08T12:01:10.227648Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-07-08T12:01:10.227664Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:361:2356], Recipient [1:361:2356]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-07-08T12:01:10.227669Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-07-08T12:01:10.227674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:01:10.227681Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-07-08T12:01:10.227685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:01:10.227693Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-07-08T12:01:10.227735Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-07-08T12:01:10.227739Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-07-08T12:01:10.227753Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:361:2356], Recipient [1:361:2356]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-07-08T12:01:10.227757Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-07-08T12:01:10.227761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:01:10.227766Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:01:10.227771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-07-08T12:01:10.227774Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-07-08T12:01:10.227785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:01:10.228423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:01:10.228558Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-07-08T12:01:10.228569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:01:10.228609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 
2025-07-08T12:01:10.228849Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:553:2477], Recipient [1:361:2356]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:555:2478] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-07-08T12:01:10.228857Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-07-08T12:01:10.228862Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-07-08T12:01:10.228896Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:357:2352], Recipient [1:361:2356]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-07-08T12:01:10.228987Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:557:2480], Recipient [1:361:2356]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:01:10.228994Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:01:10.228997Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-07-08T12:01:10.229016Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:545:2470], Recipient [1:361:2356]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-07-08T12:01:10.229021Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-07-08T12:01:10.229033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-07-08T12:01:10.229038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-07-08T12:01:10.229042Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-07-08T12:01:10.243859Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 273285138, Sender [1:44:2091], Recipient [1:361:2356]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-07-08T12:01:10.243891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-07-08T12:01:10.243897Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:10.243960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:01:10.243972Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } 2025-07-08T12:01:10.275896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:10.275935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:10.286724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:10.359946Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:357:2352], Recipient [1:361:2356]: NKikimrTx.TEvProposeTransactionStatus Status: 17 StepId: 500 TxId: 1 2025-07-08T12:01:10.360135Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:573:2493], Recipient 
[1:361:2356]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:01:10.360144Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:01:10.360148Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-07-08T12:01:10.360171Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [1:522:2449], Recipient [1:361:2356]: {TEvPlanStep step# 500 MediatorId# 72057594046382081 TabletID 72057594046644480} 2025-07-08T12:01:10.360176Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-07-08T12:01:10.360187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-07-08T12:01:10.360214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-07-08T12:01:10.360223Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-07-08T12:01:10.360298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:01:10.360307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-07-08T12:01:10.360327Z node 1 :FLAT_TX ... CollectSchemaChanged: false 2025-07-08T12:01:15.435623Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-07-08T12:01:15.435660Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-07-08T12:01:15.435663Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-07-08T12:01:15.446036Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:01:15.446081Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715662 state Ready TxInFly 0 2025-07-08T12:01:15.446107Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:01:15.446115Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:01:15.446218Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [3:955:2755], Recipient [3:370:2365]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:01:15.446226Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:01:15.446230Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-07-08T12:01:15.446273Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [3:627:2531], Recipient [3:370:2365]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 627 RawX2: 12884904419 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-07-08T12:01:15.446278Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-07-08T12:01:15.446285Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 627 RawX2: 12884904419 } Origin: 
72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-07-08T12:01:15.446293Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715662, tablet: 72075186224037888, partId: 1 2025-07-08T12:01:15.446320Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715662:1, at schemeshard: 72057594046644480, message: Source { RawX1: 627 RawX2: 12884904419 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-07-08T12:01:15.446329Z node 3 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715662:1 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046644480 2025-07-08T12:01:15.446334Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715662:1 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 627 RawX2: 12884904419 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-07-08T12:01:15.446346Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715662:1, shardIdx: 72057594046644480:1, shard: 72075186224037888, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-07-08T12:01:15.446349Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-07-08T12:01:15.446353Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715662:1, datashard: 72075186224037889, at schemeshard: 72057594046644480 2025-07-08T12:01:15.446356Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715662:1, datashard: 72075186224037888, at schemeshard: 72057594046644480 2025-07-08T12:01:15.446367Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715662:1 129 -> 240 2025-07-08T12:01:15.446398Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-07-08T12:01:15.446519Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-07-08T12:01:15.446523Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-07-08T12:01:15.446527Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715662:1 2025-07-08T12:01:15.446537Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:896:2705] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715662 at schemeshard: 72057594046644480 2025-07-08T12:01:15.446542Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:627:2531] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715662 at schemeshard: 72057594046644480 2025-07-08T12:01:15.446562Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037888 state Ready 2025-07-08T12:01:15.446568Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-07-08T12:01:15.446598Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037889 state Ready 2025-07-08T12:01:15.446603Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-07-08T12:01:15.446634Z node 3 
:FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [3:370:2365], Recipient [3:370:2365]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-07-08T12:01:15.446640Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-07-08T12:01:15.446647Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-07-08T12:01:15.446654Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 281474976715662:1ProgressState, operation type TxCopyTable 2025-07-08T12:01:15.446659Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-07-08T12:01:15.446665Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 281474976715662:1, name: CopyTableBarrier, done: 1, blocked: 1, parts count: 2 2025-07-08T12:01:15.446669Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715662, done: 1, blocked: 1 2025-07-08T12:01:15.446678Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 281474976715662:1 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 281474976715662 Name: CopyTableBarrier }, at tablet# 72057594046644480 2025-07-08T12:01:15.446681Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715662:1 240 -> 240 2025-07-08T12:01:15.446746Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-07-08T12:01:15.446749Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715662:1 2025-07-08T12:01:15.446760Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [3:370:2365], Recipient [3:370:2365]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-07-08T12:01:15.446762Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-07-08T12:01:15.446765Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-07-08T12:01:15.446769Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715662:1 ProgressState 2025-07-08T12:01:15.446778Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-07-08T12:01:15.446782Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715662:1 progress is 2/2 2025-07-08T12:01:15.446784Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 2/2 2025-07-08T12:01:15.446788Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715662:1 progress is 2/2 2025-07-08T12:01:15.446790Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 2/2 2025-07-08T12:01:15.446794Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715662, ready parts: 2/2, is published: true 2025-07-08T12:01:15.446801Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:866:2685] message: TxId: 281474976715662 2025-07-08T12:01:15.446807Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 2/2 2025-07-08T12:01:15.446813Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715662:0 2025-07-08T12:01:15.446815Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
RemoveTx for txid 281474976715662:0 2025-07-08T12:01:15.446823Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 2 2025-07-08T12:01:15.446827Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715662:1 2025-07-08T12:01:15.446828Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715662:1 2025-07-08T12:01:15.446844Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 11] was 3 2025-07-08T12:01:15.446847Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-07-08T12:01:15.446903Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-07-08T12:01:15.446911Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:866:2685] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715662 at schemeshard: 72057594046644480 2025-07-08T12:01:15.446986Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [3:880:2692], Recipient [3:370:2365]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-07-08T12:01:15.446989Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-07-08T12:01:15.446992Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-07-08T12:01:15.464630Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [3:982:2772], serverId# [3:983:2773], sessionId# [0:0:0] 2025-07-08T12:01:15.464696Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jzmyjr5r12pscz9ftej55zx5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzE0ODAzNS01OGIzOWViYi1lODc0YWZjNi03ZmZiMWFh, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 2 } items { uint32_value: 20 } }, { items { uint32_value: 3 } items { uint32_value: 30 } } 2025-07-08T12:01:15.482976Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jzmyjr6a62vnevmbmgk0s7sj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YWQ0ZTkyNjYtNzhiZWY5ZmQtZTUxZTIyZDEtZDEyMjA3ODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 2 } items { uint32_value: 20 } }, { items { uint32_value: 3 } items { uint32_value: 30 } } >> KqpImmediateEffects::MultipleEffectsWithIndex >> KqpScan::UnionAggregate [GOOD] >> KqpScan::UdfFailure >> KqpScan::EarlyFinish [GOOD] >> KqpScan::Effects |69.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrProvider::AlterTableAddIndexWithTableSettings [GOOD] |69.2%| [TA] $(B)/ydb/tests/functional/benchmarks_init/test-results/py3test/{meta.json ... 
results_accumulator.log} >> KqpScan::Limit [GOOD] >> KqpScan::LongStringCombiner >> KqpScan::LeftSemiJoinSimple [GOOD] >> KqpScan::LMapFunction >> KqpScan::SelectExistsUnexpected [GOOD] >> KqpEffects::InsertAbort_Literal_Success ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::StreamLookupSplitBeforeReading [GOOD] Test command err: 2025-07-08T12:01:11.228418Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00116b/r3tmp/tmpvysnXk/pdisk_1.dat 2025-07-08T12:01:11.340389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.356358Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:11.397947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:11.397983Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:11.408603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:11.510150Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=ZjQzNmZlODgtZDhkOTk0Ni00Y2M2M2NmNC01Y2U1ZDkxOQ== 2025-07-08 12:01:11.510 INFO ydb-core-kqp-ut-scan(pid=423189, tid=0x00007FF6BBC7DD40) [KQP] kqp_host.cpp:1383: Compiled query: ( (let $1 '('"Key" (AsOptionalType (DataType 'Uint64)) '('columnConstrains '()) '())) (let $2 '('"Value" (AsOptionalType (DataType 'String)) '('columnConstrains '()) '())) (let $3 '('('mode 'create) '('columns '($1 $2)) '('primarykey '('"Key")))) (return (Write! 
world (DataSink '"kikimr" '"db") (Key '('tablescheme (String '"/Root/Test"))) (Void) $3)) ) 2025-07-08T12:01:11.510257Z node 1 :KQP_YQL DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjQzNmZlODgtZDhkOTk0Ni00Y2M2M2NmNC01Y2U1ZDkxOQ== 2025-07-08 12:01:11.510 DEBUG ydb-core-kqp-ut-scan(pid=423189, tid=0x00007FF6BBC7DD40) [perf] yql_expr_optimize.cpp:540: Execution of [ExpandApply] took 26us 2025-07-08T12:01:11.511409Z node 1 :KQP_YQL DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjQzNmZlODgtZDhkOTk0Ni00Y2M2M2NmNC01Y2U1ZDkxOQ== 2025-07-08 12:01:11.511 DEBUG ydb-core-kqp-ut-scan(pid=423189, tid=0x00007FF6BBC7DD40) [perf] type_ann_expr.cpp:46: Execution of [TypeAnnotationTransformer::DoTransform] took 450us 2025-07-08T12:01:11.511469Z node 1 :KQP_YQL DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjQzNmZlODgtZDhkOTk0Ni00Y2M2M2NmNC01Y2U1ZDkxOQ== 2025-07-08 12:01:11.511 DEBUG ydb-core-kqp-ut-scan(pid=423189, tid=0x00007FF6BBC7DD40) [perf] yql_expr_constraint.cpp:3204: Execution of [ConstraintTransformer::DoTransform] took 32us 2025-07-08T12:01:11.511481Z node 1 :KQP_YQL DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjQzNmZlODgtZDhkOTk0Ni00Y2M2M2NmNC01Y2U1ZDkxOQ== 2025-07-08 12:01:11.511 DEBUG ydb-core-kqp-ut-scan(pid=423189, tid=0x00007FF6BBC7DD40) [perf] yql_expr_csee.cpp:599: Execution of [UpdateCompletness] took 5us 2025-07-08T12:01:11.511530Z node 1 :KQP_YQL DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjQzNmZlODgtZDhkOTk0Ni00Y2M2M2NmNC01Y2U1ZDkxOQ== 2025-07-08 12:01:11.511 DEBUG ydb-core-kqp-ut-scan(pid=423189, tid=0x00007FF6BBC7DD40) [perf] yql_expr_csee.cpp:612: Execution of [EliminateCommonSubExpressions] took 44us 2025-07-08T12:01:11.512003Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=ZjQzNmZlODgtZDhkOTk0Ni00Y2M2M2NmNC01Y2U1ZDkxOQ== 2025-07-08 12:01:11.511 INFO ydb-core-kqp-ut-scan(pid=423189, tid=0x00007FF6BBC7DD40) [KQP] kqp_transform.cpp:33: Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 '('columnConstrains '())) (let $3 '('"Key" (OptionalType (DataType 'Uint64)) $2 '())) (let $4 '('"Value" (OptionalType (DataType 'String)) $2 '())) (let $5 (KiCreateTable! world $1 '"/Root/Test" '($3 $4) '('"Key") '() '() '() '() '() '() '"table" '"false" '0 '0)) (return (Commit! 
$5 $1 '('('"mode" '"flush")))) ) 2025-07-08T12:01:11.512015Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=ZjQzNmZlODgtZDhkOTk0Ni00Y2M2M2NmNC01Y2U1ZDkxOQ== 2025-07-08 12:01:11.512 INFO ydb-core-kqp-ut-scan(pid=423189, tid=0x00007FF6BBC7DD40) [core exec] yql_execution.cpp:59: Begin, root #69 2025-07-08T12:01:11.512019Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=ZjQzNmZlODgtZDhkOTk0Ni00Y2M2M2NmNC01Y2U1ZDkxOQ== 2025-07-08 12:01:11.512 INFO ydb-core-kqp-ut-scan(pid=423189, tid=0x00007FF6BBC7DD40) [core exec] yql_execution.cpp:72: Collect unused nodes for root #69, status: Ok 2025-07-08T12:01:11.512091Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=ZjQzNmZlODgtZDhkOTk0Ni00Y2M2M2NmNC01Y2U1ZDkxOQ== 2025-07-08 12:01:11.512 INFO ydb-core-kqp-ut-scan(pid=423189, tid=0x00007FF6BBC7DD40) [core exec] yql_execution.cpp:466: Register async execution for node #68 2025-07-08T12:01:11.512099Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=ZjQzNmZlODgtZDhkOTk0Ni00Y2M2M2NmNC01Y2U1ZDkxOQ== 2025-07-08 12:01:11.512 INFO ydb-core-kqp-ut-scan(pid=423189, tid=0x00007FF6BBC7DD40) [core exec] yql_execution.cpp:87: Finish, output #69, status: Async 2025-07-08T12:01:11.512729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:11.723339Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=ZjQzNmZlODgtZDhkOTk0Ni00Y2M2M2NmNC01Y2U1ZDkxOQ== 2025-07-08 12:01:11.723 INFO ydb-core-kqp-ut-scan(pid=423189, tid=0x00007FF6BBC7DD40) [core exec] yql_execution.cpp:133: Completed async execution for node #68 2025-07-08T12:01:11.723371Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=ZjQzNmZlODgtZDhkOTk0Ni00Y2M2M2NmNC01Y2U1ZDkxOQ== 2025-07-08 12:01:11.723 INFO ydb-core-kqp-ut-scan(pid=423189, tid=0x00007FF6BBC7DD40) [core exec] yql_execution.cpp:153: State is ExecutionComplete after apply async changes for node #68 2025-07-08T12:01:11.723383Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=ZjQzNmZlODgtZDhkOTk0Ni00Y2M2M2NmNC01Y2U1ZDkxOQ== 2025-07-08 12:01:11.723 INFO ydb-core-kqp-ut-scan(pid=423189, tid=0x00007FF6BBC7DD40) [core exec] yql_execution.cpp:59: Begin, root #69 2025-07-08T12:01:11.723390Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=ZjQzNmZlODgtZDhkOTk0Ni00Y2M2M2NmNC01Y2U1ZDkxOQ== 2025-07-08 12:01:11.723 INFO ydb-core-kqp-ut-scan(pid=423189, tid=0x00007FF6BBC7DD40) [core exec] yql_execution.cpp:72: Collect unused nodes for root #69, status: Ok 2025-07-08T12:01:11.723405Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=ZjQzNmZlODgtZDhkOTk0Ni00Y2M2M2NmNC01Y2U1ZDkxOQ== 2025-07-08 12:01:11.723 INFO ydb-core-kqp-ut-scan(pid=423189, tid=0x00007FF6BBC7DD40) [core exec] yql_execution.cpp:577: Node #69 finished execution 2025-07-08T12:01:11.723421Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=ZjQzNmZlODgtZDhkOTk0Ni00Y2M2M2NmNC01Y2U1ZDkxOQ== 2025-07-08 12:01:11.723 INFO ydb-core-kqp-ut-scan(pid=423189, tid=0x00007FF6BBC7DD40) [core exec] yql_execution.cpp:594: Node #69 created 0 trackable nodes: 2025-07-08T12:01:11.723427Z node 1 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=1&id=ZjQzNmZlODgtZDhkOTk0Ni00Y2M2M2NmNC01Y2U1ZDkxOQ== 2025-07-08 12:01:11.723 INFO ydb-core-kqp-ut-scan(pid=423189, tid=0x00007FF6BBC7DD40) [core exec] yql_execution.cpp:87: Finish, output #69, status: Ok 2025-07-08T12:01:11.723433Z node 1 :KQP_YQL 
INFO: SessionId: ydb://session/3?node_id=1&id=ZjQzNmZlODgtZDhkOTk0Ni00Y2M2M2NmNC01Y2U1ZDkxOQ== 2025-07-08 12:01:11.723 INFO ydb-core-kqp-ut-scan(pid=423189, tid=0x00007FF6BBC7DD40) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #69 2025-07-08T12:01:11.725699Z node 1 :KQP_YQL INFO: TraceId: 01jzmyjmhc6hsypvj58jcr2kt3, SessionId: CompileActor 2025-07-08 12:01:11.725 INFO ydb-core-kqp-ut-scan(pid=423189, tid=0x00007FF6BBC7DD40) [KQP] kqp_host.cpp:1383: Compiled query: ( (return (Write! world (DataSink '"kikimr" '"db") (Key '('table (String '"/Root/Test"))) (PersistableRepr '((AsStruct '('"Key" (Uint32 '"201")) '('"Value" (String '"Value1"))) (AsStruct '('"Key" (Uint32 '"202")) '('"Value" (String '"Value2"))) (AsStruct '('"Key" (Uint32 '"203")) '('"Value" (String '"Value3"))) (AsStruct '('"Key" (Uint32 '"803")) '('"Value" (String '"Value3"))))) '('('mode 'replace)))) ) 2025-07-08T12:01:11.725774Z node 1 :KQP_YQL DEBUG: TraceId: 01jzmyjmhc6hsypvj58jcr2kt3, SessionId: CompileActor 2025-07-08 12:01:11.725 DEBUG ydb-core-kqp-ut-scan(pid=423189, tid=0x00007FF6BBC7DD40) [perf] yql_expr_optimize.cpp:540: Execution of [ExpandApply] took 26us 2025-07-08T12:01:11.726497Z node 1 :KQP_YQL DEBUG: TraceId: 01jzmyjmhc6hsypvj58jcr2kt3, SessionId: CompileActor 2025-07-08 12:01:11.726 DEBUG ydb-core-kqp-ut-scan(pid=423189, tid=0x00007FF6BBC7DD40) [perf] type_ann_expr.cpp:46: Execution of [TypeAnnotationTransformer::DoTransform] took 227us 2025-07-08T12:01:11.726646Z node 1 :KQP_YQL DEBUG: TraceId: 01jzmyjmhc6hsypvj58jcr2kt3, SessionId: CompileActor 2025-07-08 12:01:11.726 DEBUG ydb-core-kqp-ut-scan(pid=423189, tid=0x00007FF6BBC7DD40) [perf] yql_expr_constraint.cpp:3204: Execution of [ConstraintTransformer::DoTransform] took 109us 2025-07-08T12:01:11.726665Z node 1 :KQP_YQL DEBUG: TraceId: 01jzmyjmhc6hsypvj58jcr2kt3, SessionId: CompileActor 2025-07-08 12:01:11.726 DEBUG ydb-core-kqp-ut-scan(pid=423189, tid=0x00007FF6BBC7DD40) [perf] yql_expr_csee.cpp:599: Execution of [UpdateCompletness] took 10us 2025-07-08T12:01:11.726735Z node 1 :KQP_YQL DEBUG: TraceId: 01jzmyjmhc6hsypvj58jcr2kt3, SessionId: CompileActor 2025-07-08 12:01:11.726 DEBUG ydb-core-kqp-ut-scan(pid=423189, tid=0x00007FF6BBC7DD40) [perf] yql_expr_csee.cpp:612: Execution of [EliminateCommonSubExpressions] took 59us 2025-07-08T12:01:11.726781Z node 1 :KQP_YQL DEBUG: TraceId: 01jzmyjmhc6hsypvj58jcr2kt3, SessionId: CompileActor 2025-07-08 12:01:11.726 DEBUG ydb-core-kqp-ut-scan(pid=423189, tid=0x00007FF6BBC7DD40) [core] yql_co_simple1.cpp:4833: Convert over Uint32 '201 2025-07-08T12:01:11.726797Z node 1 :KQP_YQL DEBUG: TraceId: 01jzmyjmhc6hsypvj58jcr2kt3, SessionId: CompileActor 2025-07-08 12:01:11.726 DEBUG ydb-core-kqp-ut-scan(pid=423189, tid=0x00007FF6BBC7DD40) [core] yql_co_simple1.cpp:4833: Convert over Uint32 '202 2025-07-08T12:01:11.726817Z node 1 :KQP_YQL DEBUG: TraceId: 01jzmyjmhc6hsypvj58jcr2kt3, SessionId: CompileActor 2025-07-08 12:01:11.726 DEBUG ydb-core-kqp-ut-scan(pid=423189, tid=0x00007FF6BBC7DD40) [core] yql_co_simple1.cpp:4833: Convert over Uint32 '203 2025-07-08T12:01:11.726829Z node 1 :KQP_YQL DEBUG: TraceId: 01jzmyjmhc6hsypvj58jcr2kt3, SessionId: CompileActor 2025-07-08 12:01:11.726 DEBUG ydb-core-kqp-ut-scan(pid=423189, tid=0x00007FF6BBC7DD40) [core] yql_co_simple1.cpp:4833: Convert over Uint32 '803 2025-07-08T12:01:11.726934Z node 1 :KQP_YQL DEBUG: TraceId: 01jzmyjmhc6hsypvj58jcr2kt3, SessionId: CompileActor 2025-07-08 12:01:11.726 DEBUG ydb-core-kqp-ut-scan(pid=423189, tid=0x00007FF6BBC7DD40) [perf] 
type_ann_expr.cpp:46: Execution of [TypeAnnotationTransformer::DoTransform] took 53us 2025-07-08T12:01:11.726988Z node 1 :KQP_YQL DEBUG: TraceId: 01jzmyjmhc6hsypvj58jcr2kt3, SessionId: CompileActor 2025-07-08 12:01:11.726 DEBUG ydb-core-kqp-ut-scan(pid=423189, tid=0x00007FF6BBC7DD40) [perf] yql_expr_constraint.cpp:3204: Execution of [ConstraintTransformer::DoTransform] took 30us 2025-07-08T12:01:11.727003Z node 1 :KQP_YQL DEBUG: TraceId: 01jzmyjmhc6hsypvj58jcr2kt3, SessionId: CompileActor 2025-07-08 12:01:11.726 DEBUG ydb-core-kqp-ut-scan(pid=423189, tid=0x00007FF6BBC7DD40) [perf] yql_expr_csee.cpp:599: Execution of [UpdateCompletness] took 7us 2025-07-08T12:01:11.727037Z node 1 :KQP_YQL DEBUG: TraceId: 01jzmyjmhc6hsypvj58jcr2kt3, SessionId: CompileActor 2025-07-08 12:01:11.727 DEBUG ydb-core-kqp-ut-scan(pid=423189, tid=0x00007FF6BBC7DD40) [perf] yql_expr_csee.cpp:612: Execution of [EliminateCommonSubExpressions] took 28us 2025-07-08T12:01:11.727458Z node 1 :KQP_YQL INFO: TraceId: 01jzmyjmhc6hsypvj58jcr2kt3, SessionId: CompileActor 2025-07-08 12:01:11.727 INFO ydb-cor ... . Database : . PoolId : . }. CA StateFunc 276037646 2025-07-08T12:01:11.926428Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:735:2614], TxId: 281474976715660, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jzmyjmjk2zt526exk49p0jxs. SessionId : ydb://session/3?node_id=1&id=Y2Q2OTljY2YtYTY0MTA5MTYtNWE4MjViZjUtZTZkYmM0ZmU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : . }. InputTransform[0] fatal error: {
: Error: ydb/core/kqp/runtime/kqp_stream_lookup_actor.cpp:211 GetShardReads(): requirement it != ReadsPerShard.end() failed } 2025-07-08T12:01:11.926839Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:735:2614], TxId: 281474976715660, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jzmyjmjk2zt526exk49p0jxs. SessionId : ydb://session/3?node_id=1&id=Y2Q2OTljY2YtYTY0MTA5MTYtNWE4MjViZjUtZTZkYmM0ZmU=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : . }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: ydb/core/kqp/runtime/kqp_stream_lookup_actor.cpp:211 GetShardReads(): requirement it != ReadsPerShard.end() failed }. 2025-07-08T12:01:11.926871Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715660, task: 2. pass away 2025-07-08T12:01:11.926902Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715660;task_id=2;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-07-08T12:01:11.926985Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:736:2615], TxId: 281474976715660, task: 3. Ctx: { TraceId : 01jzmyjmjk2zt526exk49p0jxs. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=Y2Q2OTljY2YtYTY0MTA5MTYtNWE4MjViZjUtZTZkYmM0ZmU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : . }. CA StateFunc 271646735 2025-07-08T12:01:11.926996Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:736:2615], TxId: 281474976715660, task: 3. Ctx: { TraceId : 01jzmyjmjk2zt526exk49p0jxs. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=Y2Q2OTljY2YtYTY0MTA5MTYtNWE4MjViZjUtZTZkYmM0ZmU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : . }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2025-07-08T12:01:11.927002Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976715660, task: 3. pass away 2025-07-08T12:01:11.927009Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715660;task_id=3;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-07-08T12:01:11.927819Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2Q2OTljY2YtYTY0MTA5MTYtNWE4MjViZjUtZTZkYmM0ZmU=, ActorId: [1:696:2585], ActorState: ExecuteState, TraceId: 01jzmyjmjk2zt526exk49p0jxs, Create QueryResponse for error on request, msg: 2025-07-08T12:01:11.928019Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1500, txId: 281474976715659] shutting down 2025-07-08T12:01:11.928129Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jzmyjmjk2zt526exk49p0jxs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2Q2OTljY2YtYTY0MTA5MTYtNWE4MjViZjUtZTZkYmM0ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root assertion failed at ydb/core/kqp/ut/scan/kqp_split_ut.cpp:451, void NKikimr::NKqp::NTestSuiteKqpSplit::TTestSetup::AssertSuccess(): (reply->Get()->Record.GetYdbStatus() == Ydb::StatusIds::SUCCESS) failed: (INTERNAL_ERROR != SUCCESS) , with diff: (INT|SUCC)E(RNAL_ERROR|SS) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+137 (0x136EB729) NKikimr::NKqp::NTestSuiteKqpSplit::TTestSetup::AssertSuccess()+665 (0x1345C949) NKikimr::NKqp::NTestSuiteKqpSplit::TTestCaseUndeliveryOnFinishedRead::Execute_(NUnitTest::TTestContext&)+2400 (0x1345B060) NKikimr::NKqp::NTestSuiteKqpSplit::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x134643F7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+126 (0x136ED5EE) NKikimr::NKqp::NTestSuiteKqpSplit::TCurrentTest::Execute()+395 (0x13463DBB) NUnitTest::TTestFactory::Execute()+803 (0x136EDD63) NUnitTest::RunMain(int, char**)+3021 (0x136FF1ED) ??+0 (0x7FF6BBD92D90) __libc_start_main+128 (0x7FF6BBD92E40) _start+41 (0x1249A029) Trying to start YDB, gRPC: 23083, MsgBus: 11665 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00116b/r3tmp/tmpj5vOmL/pdisk_1.dat 2025-07-08T12:01:12.386355Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:01:12.400520Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23083, node 2 2025-07-08T12:01:12.415764Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:12.415782Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:12.415784Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:12.415832Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11665 2025-07-08T12:01:12.473231Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:12.473266Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:12.477293Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11665 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:12.503769Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.507718Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:12.549612Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.571253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:12.601195Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
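The ESchemeOpCreateTable proposals above are the schemeshard operations issued while the test creates its fixture tables. A minimal YQL sketch of one such table, assuming the Key Uint32 / Value String schema implied by the Write! expression compiled earlier in this log (the actual test helpers may declare additional columns and tables):

--!syntax_v1
CREATE TABLE `/Root/Test` (
    Key Uint32,
    Value String,
    PRIMARY KEY (Key)
);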
2025-07-08T12:01:12.613099Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.731976Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.742533Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.753694Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.766547Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.780563Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.794247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.809136Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.970403Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.046396Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jzmyjnrp81mt36f65yttf5q6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmExMzZhZGYtZGZjYzc4YjgtN2NlZTEyYzItY2E0YjdkOTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:13.049646Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jzmyjnrp81mt36f65yttf5q6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmExMzZhZGYtZGZjYzc4YjgtN2NlZTEyYzItY2E0YjdkOTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:13.052357Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jzmyjnrp81mt36f65yttf5q6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MmExMzZhZGYtZGZjYzc4YjgtN2NlZTEyYzItY2E0YjdkOTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:13.119105Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jzmyjnv1b05wt1c1wat6z6yg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjQxOGY0NDgtZWJlNzZjNGUtOTdlNzhkODQtN2VmYWFjODc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715676 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 resume evread ----------------------------------------------------------- 2025-07-08T12:01:13.135090Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976073162, txId: 281474976715674] shutting down >> KqpSplit::StreamLookupSplitAfterFirstResult [GOOD] >> KqpSplit::StreamLookupRetryAttemptForFinishedRead >> TSchemeShardViewTest::AsyncCreateSameView >> KqpScan::AggregateNoColumn [GOOD] >> KqpScan::AggregateNoColumnNoRemaps >> KikimrIcGateway::TestLoadTableMetadata [GOOD] >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata >> TSchemeShardViewTest::ReadOnlyMode >> KqpScan::RestrictSqlV0 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::SelectExistsUnexpected [GOOD] Test command err: Trying to start YDB, gRPC: 20309, MsgBus: 29447 2025-07-08T12:01:13.789000Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679937064548757:2164];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:13.789117Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00113e/r3tmp/tmphB8KtQ/pdisk_1.dat 2025-07-08T12:01:13.846459Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20309, node 1 2025-07-08T12:01:13.866353Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:13.866369Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:13.866371Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:13.866406Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29447 TClient is connected to server localhost:29447 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-07-08T12:01:13.917553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:13.917577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:13.918483Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:13.932213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:13.941308Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:13.961868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:13.987397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:14.008583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:14.020251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
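The replace-mode Write! expression compiled earlier in this log (rows with keys 201, 202, 203 and 803 written to /Root/Test) corresponds roughly to the following YQL statement; this is an illustrative reconstruction from the log, not the literal text the test submits:

--!syntax_v1
REPLACE INTO `/Root/Test` (Key, Value) VALUES
    (201u, "Value1"),
    (202u, "Value2"),
    (203u, "Value3"),
    (803u, "Value3");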
2025-07-08T12:01:14.144909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:14.157446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:14.167338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:14.178394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:14.195719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:14.251002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:14.267477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:14.518479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:14.579240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-07-08T12:01:14.591104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-07-08T12:01:14.793307Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:01:14.908497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-07-08T12:01:15.078741Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976075122, txId: 281474976715680] shutting down 2025-07-08T12:01:15.118485Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976075150, txId: 281474976715682] shutting down 2025-07-08T12:01:15.175156Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976075213, txId: 281474976715684] shutting down 2025-07-08T12:01:15.230866Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976075269, txId: 281474976715686] shutting down Trying to start YDB, gRPC: 19876, MsgBus: 25210 2025-07-08T12:01:15.629068Z node 2 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679948179112463:2230];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:15.631747Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00113e/r3tmp/tmp2O4pMS/pdisk_1.dat 2025-07-08T12:01:15.661889Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19876, node 2 2025-07-08T12:01:15.717569Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:15.717583Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:15.717585Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:15.717640Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25210 2025-07-08T12:01:15.733326Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:15.733353Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:15.737358Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25210 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:15.794261Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:15.795744Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:15.841809Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:15.868188Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-07-08T12:01:15.900939Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:15.919447Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.061421Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.072221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.130426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.140431Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.162837Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.181817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.193190Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.355780Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:16.516417Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976076473, txId: 281474976715672] shutting down 2025-07-08T12:01:16.620108Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:01:16.645482Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976076606, txId: 281474976715675] shutting down >> TSchemeShardViewTest::AsyncCreateSameView [GOOD] >> KqpSplit::AfterResult+Unspecified [GOOD] >> KqpScan::LMapFunction [GOOD] >> KqpScan::Like |69.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> KqpScan::Effects [GOOD] >> KqpScan::DropRedundantSortByPk >> TSchemeShardViewTest::ReadOnlyMode [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::RestrictSqlV0 [GOOD] Test command err: Trying to start YDB, gRPC: 13274, MsgBus: 29354 2025-07-08T12:01:15.574022Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679948698973218:2081];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:15.574380Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0010f2/r3tmp/tmpk4Od5j/pdisk_1.dat 2025-07-08T12:01:15.639465Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13274, node 1 2025-07-08T12:01:15.657108Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:15.657120Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:15.657123Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:15.657166Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29354 2025-07-08T12:01:15.676752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:15.676778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:15.677531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29354 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-07-08T12:01:15.717782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:15.721376Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:15.725928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:15.810318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:15.855053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:15.882122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:15.960546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:15.973843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:15.987259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.005129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.024572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.033763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.049214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.201664Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7524679952993942928:2446] TxId: 281474976715671. Ctx: { TraceId: 01jzmyjrwxajpjn3phtg6fhamr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2U4YTA0NmQtZDUzZmZkZjUtYzJiZWVhOWItNGU3YTA2ZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Can not find default state storage group for database /Root 2025-07-08T12:01:16.202789Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976076200, txId: 281474976715670] shutting down Trying to start YDB, gRPC: 19636, MsgBus: 22803 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0010f2/r3tmp/tmph9O7K6/pdisk_1.dat 2025-07-08T12:01:16.509361Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:01:16.509789Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19636, node 2 2025-07-08T12:01:16.521416Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:16.521429Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:16.521431Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:16.521470Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22803 TClient is connected to server localhost:22803 2025-07-08T12:01:16.587847Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:16.587886Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:16.588843Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T12:01:16.601443Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.605736Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:16.613679Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:16.639351Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-07-08T12:01:16.693643Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.718124Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:16.931967Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.954367Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.976876Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.987966Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.000561Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.014275Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.028462Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.197541Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7524679956117899602:2447], status: GENERIC_ERROR, issues:
:1:0: Error: V0 syntax is disabled 2025-07-08T12:01:17.197894Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmY4MTljZDktOGY4YjZkOWItNjg4N2VkZjQtNDZhOWE0NTA=, ActorId: [2:7524679956117899600:2446], ActorState: ExecuteState, TraceId: 01jzmyjswabh4m4yts0dnk80em, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
:1:0: Error: V0 syntax is disabled >> TSchemeShardViewTest::EmptyQueryText >> TSchemeShardViewTest::EmptyName >> KqpScan::LongStringCombiner [GOOD] >> KqpScan::LimitOverSecondaryIndexRead >> KqpEffects::InsertAbort_Literal_Success [GOOD] >> KqpEffects::InsertAbort_Params_Conflict+UseSink >> TSchemeShardViewTest::CreateView ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::AfterResult+Unspecified [GOOD] Test command err: Trying to start YDB, gRPC: 2082, MsgBus: 15904 2025-07-08T12:01:15.110070Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679948620386851:2238];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:15.145258Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001119/r3tmp/tmpjYIDbr/pdisk_1.dat 2025-07-08T12:01:15.176092Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2082, node 1 2025-07-08T12:01:15.184518Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:15.184529Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:15.184531Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:15.184567Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15904 TClient is connected to server localhost:15904 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:15.246436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:15.246465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:15.246960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:15.247352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:15.263511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
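The GENERIC_ERROR above is reported because the query was submitted in the legacy V0 dialect, which is disabled in this configuration. In YQL the dialect is normally selected with a header directive; a hedged example of an equivalent V1 query against the fixture table named earlier in the log:

--!syntax_v1
SELECT Key, Value
FROM `/Root/Test`
WHERE Key > 200u;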
2025-07-08T12:01:15.331948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:15.351961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:15.363262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:15.494839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:15.502609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:15.516249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:15.531299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:15.548094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:15.557657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:15.574813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:15.792332Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7524679948620389131:2445] TxId: 281474976715671. Ctx: { TraceId: 01jzmyjrfn26casyj9qkbypk09, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWM2YjVhYmEtZWEwZjY4YjEtMzQ1ZTA4NDEtMzgxZWZmNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Can not find default state storage group for database 2025-07-08T12:01:15.792395Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jzmyjrfn26casyj9qkbypk09, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWM2YjVhYmEtZWEwZjY4YjEtMzQ1ZTA4NDEtMzgxZWZmNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715672 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2025-07-08T12:01:16.107731Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:01:16.227259Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976075836, txId: 281474976715670] shutting down Trying to start YDB, gRPC: 11194, MsgBus: 17805 2025-07-08T12:01:16.404737Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679952581019179:2088];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:16.406230Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001119/r3tmp/tmp0NcMAt/pdisk_1.dat 2025-07-08T12:01:16.469625Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11194, node 2 2025-07-08T12:01:16.489473Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:16.489489Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:16.489491Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:16.489530Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:01:16.509239Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:16.509270Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:17805 2025-07-08T12:01:16.513264Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17805 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-07-08T12:01:16.585699Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.590880Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:16.610008Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.637246Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:16.684678Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:16.718224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:16.906443Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.922888Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.942425Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.962801Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.976284Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.986421Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.004078Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.169016Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jzmyjsv2c2bjrsthpnw60js4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTAzZjQ1NzUtZWZmOWZhNzktZjMzNTVlYS1kZTdiNzVlZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715672 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2025-07-08T12:01:17.409230Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:01:17.568270Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976077215, txId: 281474976715670] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateSameView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T12:01:17.512180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:01:17.512209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:01:17.512214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:01:17.512219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:01:17.512232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:01:17.512236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:01:17.512248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:01:17.512262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:01:17.512346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:01:17.534945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:01:17.534968Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:17.539368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:01:17.539415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:01:17.539439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:01:17.540826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:01:17.540866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:01:17.540945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been 
configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:17.541115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:01:17.541729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:17.541774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:01:17.541940Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:17.541948Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:17.541969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:01:17.541976Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:17.541981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:01:17.541999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:01:17.542901Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T12:01:17.555780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:01:17.555851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:17.555900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:01:17.555928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:01:17.555936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:17.556550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:17.556572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:01:17.556606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:17.556615Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:01:17.556618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:01:17.556624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:01:17.556973Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:17.556985Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:01:17.556991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:01:17.557264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:17.557272Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:17.557276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:17.557281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:01:17.557726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:01:17.558063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:01:17.558095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:01:17.558221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:17.558239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:17.558245Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:17.558297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:01:17.558301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:17.558325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:01:17.558336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:01:17.558654Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:17.558660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:17.558693Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-07-08T12:01:17.558697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:01:17.558703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:17.558710Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:01:17.558718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:01:17.558720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:17.558723Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:01:17.558725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:17.558728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:01:17.558731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:17.558734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:01:17.558736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:01:17.558744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:01:17.558748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:01:17.558750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:01:17.559053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:01:17.559064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
: 101, at schemeshard: 72057594046678944 2025-07-08T12:01:17.562994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/MyView', error: path exists but creating right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp:151, operation: CREATE VIEW, path: /MyRoot/MyView 2025-07-08T12:01:17.563072Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-07-08T12:01:17.563327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-07-08T12:01:17.563348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-07-08T12:01:17.563390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:17.563402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:17.563407Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2025-07-08T12:01:17.563424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-07-08T12:01:17.563441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:01:17.563451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 101 2025-07-08T12:01:17.563735Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:17.563740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:17.563768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-07-08T12:01:17.563780Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:17.563783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-07-08T12:01:17.563786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-07-08T12:01:17.563832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 
2025-07-08T12:01:17.563836Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-07-08T12:01:17.563844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-07-08T12:01:17.563846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-07-08T12:01:17.563850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-07-08T12:01:17.563851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-07-08T12:01:17.563854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-07-08T12:01:17.563857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-07-08T12:01:17.563859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-07-08T12:01:17.563862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-07-08T12:01:17.563868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-07-08T12:01:17.563872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-07-08T12:01:17.563875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-07-08T12:01:17.563876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-07-08T12:01:17.563935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-07-08T12:01:17.563941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-07-08T12:01:17.563944Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-07-08T12:01:17.563947Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-07-08T12:01:17.563950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:01:17.564018Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-07-08T12:01:17.564024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-07-08T12:01:17.564027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-07-08T12:01:17.564029Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-07-08T12:01:17.564031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 2] was 1 2025-07-08T12:01:17.564037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-07-08T12:01:17.564577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-07-08T12:01:17.564612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 2025-07-08T12:01:17.564653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-07-08T12:01:17.564658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-07-08T12:01:17.564675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-07-08T12:01:17.564676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-07-08T12:01:17.564681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-07-08T12:01:17.564683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-07-08T12:01:17.564722Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-07-08T12:01:17.564740Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-07-08T12:01:17.564745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-07-08T12:01:17.564748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:307:2298] 2025-07-08T12:01:17.564762Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-07-08T12:01:17.564766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-07-08T12:01:17.564768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:307:2298] 2025-07-08T12:01:17.564777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-07-08T12:01:17.564779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:307:2298] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-07-08T12:01:17.564818Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:01:17.564839Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 25us result status StatusSuccess 2025-07-08T12:01:17.564907Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: "MyView" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpImmediateEffects::MultipleEffectsWithIndex [GOOD] >> KqpImmediateEffects::MultiShardUpsertAfterRead >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata [GOOD] >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata >> TSchemeShardViewTest::EmptyName [GOOD] >> TSchemeShardViewTest::EmptyQueryText [GOOD] >> KqpSplit::StreamLookupRetryAttemptForFinishedRead [GOOD] >> TSchemeShardViewTest::CreateView [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T12:01:17.620625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:01:17.620645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:01:17.620649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:01:17.620652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:01:17.620661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:01:17.620663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:01:17.620672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:01:17.620682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] 
Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:01:17.620747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:01:17.631452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:01:17.631473Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:17.635345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:01:17.635412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:01:17.635452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:01:17.637195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:01:17.637254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:01:17.637351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:17.637939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:01:17.639015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:17.639060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:01:17.639282Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:17.639294Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:17.639310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:01:17.639317Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:17.639322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:01:17.639347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:01:17.641523Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T12:01:17.659513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:01:17.659584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:17.659640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:01:17.659678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:01:17.659688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:17.660555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:17.660594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:01:17.660641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:17.660650Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:01:17.660654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:01:17.660661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:01:17.661137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:17.661151Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:01:17.661156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:01:17.661498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:17.661509Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:17.661514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:17.661520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:01:17.662112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:01:17.662501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:01:17.662540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:01:17.662722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:17.662747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:17.662755Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:17.662821Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change 
state for txid 1:0 128 -> 240 2025-07-08T12:01:17.662827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:17.662855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:01:17.662867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:01:17.663280Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:17.663288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:17.663328Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:17.663333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:01:17.663344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:17.663353Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:01:17.663364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:01:17.663368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:17.663372Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:01:17.663375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:17.663379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:01:17.663395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:17.663400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:01:17.663404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:01:17.663414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:01:17.663419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:01:17.663423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:01:17.663817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:01:17.663832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
, at schemeshard: 72057594046678944 2025-07-08T12:01:17.736839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:01:17.736853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:01:17.736881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:01:17.736892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:01:17.736920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-07-08T12:01:17.736979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-07-08T12:01:17.736991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2025-07-08T12:01:17.736999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2025-07-08T12:01:17.737019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-07-08T12:01:17.737026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-07-08T12:01:17.737032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-07-08T12:01:17.738742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:01:17.739404Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:17.739423Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:17.739448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:01:17.739459Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:17.739464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:01:17.739991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 Leader for TabletID 72057594046678944 is [1:380:2349] sender: [1:437:2058] recipient: [1:15:2062] 2025-07-08T12:01:17.772555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "ThirdView" QueryText: "Some query" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:01:17.772634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0 2025-07-08T12:01:17.772644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0, viewDescription: Name: "ThirdView" QueryText: "Some query" 2025-07-08T12:01:17.772668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: ThirdView, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 
2025-07-08T12:01:17.772687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-07-08T12:01:17.772697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:01:17.776570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944 PathId: 3, at schemeshard: 72057594046678944 2025-07-08T12:01:17.776640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/ThirdView 2025-07-08T12:01:17.776697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-07-08T12:01:17.776707Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 103:0 ProgressState 2025-07-08T12:01:17.776719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-07-08T12:01:17.776748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:01:17.779447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-07-08T12:01:17.779511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000003 2025-07-08T12:01:17.779671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:17.779701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:17.779711Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 103:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000003 2025-07-08T12:01:17.779751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2025-07-08T12:01:17.779786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:01:17.779798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 FAKE_COORDINATOR: Erasing txId 103 2025-07-08T12:01:17.785557Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:17.785578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:17.785630Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T12:01:17.785659Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:17.785665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:429:2387], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-07-08T12:01:17.785672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:429:2387], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-07-08T12:01:17.785684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-07-08T12:01:17.785693Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-07-08T12:01:17.785707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-07-08T12:01:17.785711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-07-08T12:01:17.785717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-07-08T12:01:17.785720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-07-08T12:01:17.785724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-07-08T12:01:17.785731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-07-08T12:01:17.785736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-07-08T12:01:17.785740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-07-08T12:01:17.785763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T12:01:17.785785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2025-07-08T12:01:17.785789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-07-08T12:01:17.785792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-07-08T12:01:17.786035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-07-08T12:01:17.786053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-07-08T12:01:17.786059Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-07-08T12:01:17.786065Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-07-08T12:01:17.786070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T12:01:17.786261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 4 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-07-08T12:01:17.786280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-07-08T12:01:17.786285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-07-08T12:01:17.786289Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-07-08T12:01:17.786293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-07-08T12:01:17.786306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-07-08T12:01:17.787132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-07-08T12:01:17.787462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 [GOOD] >> IncrementalBackup::E2EBackupCollection [GOOD] >> KqpScan::Like [GOOD] >> TPersQueueNewSchemeCacheTest::CheckGrpcWriteNoDC [GOOD] >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC >> KqpScan::UdfFailure [GOOD] >> TSchemeShardViewTest::DropView >> KqpScan::AggregateNoColumnNoRemaps [GOOD] >> KqpScan::AggregateEmptySum >> TSchemeShardViewTest::AsyncCreateDifferentViews ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyQueryText [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T12:01:18.260295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:01:18.260323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:01:18.260329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:01:18.260334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:01:18.260348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:01:18.260351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:01:18.260365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:01:18.260379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: 
Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:01:18.260461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:01:18.275359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:01:18.275383Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:18.280932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:01:18.281021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:01:18.281059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:01:18.282544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:01:18.282594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:01:18.282710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:18.282871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:01:18.283640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:18.283686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:01:18.283929Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:18.283940Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:18.283965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:01:18.283972Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:18.283977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:01:18.284004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:01:18.285471Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T12:01:18.304000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:01:18.304086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:18.304155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:01:18.304197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:01:18.304209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:18.305272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:18.305309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:01:18.305363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:18.305373Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:01:18.305378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:01:18.305383Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:01:18.306007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:18.306020Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:01:18.306025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:01:18.306371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:18.306380Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:18.306386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:18.306393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:01:18.306917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:01:18.307318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:01:18.307362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:01:18.307539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:18.307563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:18.307574Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:18.307640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change 
state for txid 1:0 128 -> 240 2025-07-08T12:01:18.307647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:18.307679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:01:18.307690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:01:18.308068Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:18.308076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:18.308123Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:18.308128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:01:18.308137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:18.308143Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:01:18.308154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:01:18.308158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:18.308163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:01:18.308165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:18.308169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:01:18.308174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:18.308179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:01:18.308182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:01:18.308192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:01:18.308198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:01:18.308202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:01:18.308580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:01:18.308595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:01:18.308600Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-07-08T12:01:18.308605Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 
72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-07-08T12:01:18.308613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:01:18.308626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-07-08T12:01:18.309177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-07-08T12:01:18.309279Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-07-08T12:01:18.309422Z node 1 :TX_PROXY DEBUG: actor# [1:271:2262] Bootstrap 2025-07-08T12:01:18.310821Z node 1 :TX_PROXY DEBUG: actor# [1:271:2262] Become StateWork (SchemeCache [1:276:2267]) 2025-07-08T12:01:18.311469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "MyView" QueryText: "" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:01:18.311526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0 2025-07-08T12:01:18.311532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0, viewDescription: Name: "MyView" QueryText: "" 2025-07-08T12:01:18.311555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: MyView, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-07-08T12:01:18.311572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-07-08T12:01:18.311579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:01:18.311789Z node 1 :TX_PROXY DEBUG: actor# [1:271:2262] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-07-08T12:01:18.312458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusAccepted TxId: 101 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-07-08T12:01:18.312494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/MyView 2025-07-08T12:01:18.312556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-07-08T12:01:18.312562Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 ProgressState 2025-07-08T12:01:18.312569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-07-08T12:01:18.312590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:01:18.312661Z node 1 
:TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-07-08T12:01:18.312970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-07-08T12:01:18.313002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-07-08T12:01:18.313071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:18.313090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:18.313097Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2025-07-08T12:01:18.313121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-07-08T12:01:18.313150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:01:18.313159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 101 2025-07-08T12:01:18.313577Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:18.313586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:18.313627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-07-08T12:01:18.313646Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:18.313651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-07-08T12:01:18.313656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-07-08T12:01:18.313747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-07-08T12:01:18.313754Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-07-08T12:01:18.313763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-07-08T12:01:18.313767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-07-08T12:01:18.313773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-07-08T12:01:18.313775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready 
parts: 1/1 2025-07-08T12:01:18.313793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-07-08T12:01:18.313798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-07-08T12:01:18.313802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-07-08T12:01:18.313806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-07-08T12:01:18.313817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-07-08T12:01:18.313822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-07-08T12:01:18.313826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-07-08T12:01:18.313832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-07-08T12:01:18.313913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-07-08T12:01:18.313922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-07-08T12:01:18.313927Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-07-08T12:01:18.313932Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-07-08T12:01:18.313935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:01:18.314055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-07-08T12:01:18.314065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-07-08T12:01:18.314068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-07-08T12:01:18.314071Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-07-08T12:01:18.314075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-07-08T12:01:18.314083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-07-08T12:01:18.314841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-07-08T12:01:18.314919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::StreamLookupRetryAttemptForFinishedRead [GOOD] Test command err: Trying to start YDB, gRPC: 19593, MsgBus: 21176 2025-07-08T12:01:15.541101Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679949157739466:2069];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:15.541144Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0010f5/r3tmp/tmpqE4rRT/pdisk_1.dat 2025-07-08T12:01:15.611750Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19593, node 1 2025-07-08T12:01:15.640812Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:15.640838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:15.641964Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:15.652802Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:15.652820Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:15.652822Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:15.652862Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21176 TClient is connected to server localhost:21176 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T12:01:15.726228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:15.729020Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-07-08T12:01:15.753136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:15.822658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:15.847283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-07-08T12:01:15.866286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:16.007147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.025450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.035195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.050704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.062697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.079478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.089604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.271274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.456093Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01jzmyjs1j9e7jk09nzyna3n58, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTI2YTA1NmMtN2FkMDIzM2YtN2YwYjRmOTYtZTAyNDQ1NDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:16.462344Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jzmyjs1j9e7jk09nzyna3n58, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTI2YTA1NmMtN2FkMDIzM2YtN2YwYjRmOTYtZTAyNDQ1NDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:16.465464Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jzmyjs1j9e7jk09nzyna3n58, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTI2YTA1NmMtN2FkMDIzM2YtN2YwYjRmOTYtZTAyNDQ1NDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T12:01:16.557004Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:01:16.570386Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7524679953452709532:2474] TxId: 281474976710675. Ctx: { TraceId: 01jzmyjs5v6dzqtqaex33pnr8m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2E2NzI1MmItODY0OGIxZGYtYzI4MjZiM2QtM2U1MWFhOTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Can not find default state storage group for database 2025-07-08T12:01:16.570473Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jzmyjs5v6dzqtqaex33pnr8m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2E2NzI1MmItODY0OGIxZGYtYzI4MjZiM2QtM2U1MWFhOTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710676 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2025-07-08T12:01:16.961987Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976076613, txId: 281474976710674] shutting down Trying to start YDB, gRPC: 26884, MsgBus: 62486 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0010f5/r3tmp/tmpBbx5nc/pdisk_1.dat 2025-07-08T12:01:17.287727Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679954638566368:2063];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:17.287773Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:01:17.306551Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26884, node 2 2025-07-08T12:01:17.321161Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:17.321175Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:17.321177Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:17.321226Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62486 TClient is connected to server localhost:62486 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-07-08T12:01:17.387906Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:17.387941Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:17.388930Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T12:01:17.390374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.391591Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:17.421693Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:17.446687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:17.488623Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-07-08T12:01:17.504459Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.705977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.717669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.730596Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.742308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.756365Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.771166Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.784257Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.923381Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.995022Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jzmyjtkf668kyknrsrbydjq7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzYwMjBjY2ItYTYzNDgwOS1hMmI4MDQ1My0yNjhmMGYzNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:17.996704Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jzmyjtkf668kyknrsrbydjq7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzYwMjBjY2ItYTYzNDgwOS1hMmI4MDQ1My0yNjhmMGYzNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:17.997662Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jzmyjtkf668kyknrsrbydjq7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzYwMjBjY2ItYTYzNDgwOS1hMmI4MDQ1My0yNjhmMGYzNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:18.066335Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jzmyjtngcnkxvr9wfp03wm3k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWYxY2UxZTgtZDEwOGY5NDctZDI1YmI5ODAtNTQ0YjhhZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root captured evread ----------------------------------------------------------- 2025-07-08T12:01:18.068583Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976078111, txId: 281474976715674] shutting down >> KqpEffects::InsertAbort_Params_Conflict+UseSink [GOOD] >> KqpEffects::InsertAbort_Params_Conflict-UseSink >> KqpScan::DropRedundantSortByPk [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyName [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T12:01:18.299577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:01:18.299611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:01:18.299616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:01:18.299622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:01:18.299635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:01:18.299639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:01:18.299648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:01:18.299661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:01:18.299747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:01:18.312280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:01:18.312296Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:18.315791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:01:18.315859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:01:18.315900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:01:18.317747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:01:18.317814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:01:18.317952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:18.318645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:01:18.319723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at 
schemeshard: 72057594046678944 2025-07-08T12:01:18.319766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:01:18.320028Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:18.320037Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:18.320060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:01:18.320068Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:18.320073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:01:18.320106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:01:18.321760Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T12:01:18.341692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:01:18.341758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:18.341830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:01:18.341864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:01:18.341872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:18.342704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:18.342736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:01:18.342787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:18.342797Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:01:18.342803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:01:18.342809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:01:18.343301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:18.343313Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:01:18.343318Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 
128 2025-07-08T12:01:18.343680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:18.343690Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:18.343696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:18.343704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:01:18.344296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:01:18.344696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:01:18.344738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:01:18.344922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:18.344945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:18.344981Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:18.345058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:01:18.345064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:18.345094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:01:18.345106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:01:18.346540Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:18.346551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:18.346601Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:18.346606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:01:18.346616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-07-08T12:01:18.346623Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:01:18.346635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:01:18.346639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:18.346644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:01:18.346647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:18.346652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:01:18.346657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:18.346662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:01:18.346666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:01:18.346678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:01:18.346684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:01:18.346689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:01:18.347108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:01:18.347129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:01:18.347135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-07-08T12:01:18.347140Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-07-08T12:01:18.347147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:01:18.347162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-07-08T12:01:18.348014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-07-08T12:01:18.348089Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-07-08T12:01:18.348206Z node 1 :TX_PROXY DEBUG: actor# [1:271:2262] Bootstrap 2025-07-08T12:01:18.349570Z node 1 :TX_PROXY DEBUG: actor# [1:271:2262] Become StateWork (SchemeCache [1:276:2267]) 2025-07-08T12:01:18.350360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "" QueryText: "Some query" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:01:18.350407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/, opId: 101:0 
2025-07-08T12:01:18.350416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/, opId: 101:0, viewDescription: Name: "" QueryText: "Some query" 2025-07-08T12:01:18.350442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp:151, at schemeshard: 72057594046678944 2025-07-08T12:01:18.350675Z node 1 :TX_PROXY DEBUG: actor# [1:271:2262] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-07-08T12:01:18.351393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/\', error: path part shouldn\'t be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp:151" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:18.351432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp:151, operation: CREATE VIEW, path: /MyRoot/ 2025-07-08T12:01:18.351508Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::CreateView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T12:01:18.329191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:01:18.329215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:01:18.329221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:01:18.329226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:01:18.329238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:01:18.329241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:01:18.329253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:01:18.329264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-07-08T12:01:18.329324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:01:18.339536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:01:18.339561Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:18.344994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:01:18.345063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:01:18.345096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:01:18.346586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:01:18.346638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:01:18.346755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:18.346928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:01:18.347823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:18.347860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:01:18.348055Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:18.348065Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:18.348078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:01:18.348083Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:18.348087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:01:18.348105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:01:18.349298Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T12:01:18.366139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:01:18.366212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:18.366271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:01:18.366301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:01:18.366307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:18.373441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: 
StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:18.373485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:01:18.373541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:18.373555Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:01:18.373561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:01:18.373567Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:01:18.374339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:18.374353Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:01:18.374360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:01:18.374737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:18.374747Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:18.374753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:18.374760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:01:18.375410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:01:18.375925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:01:18.375978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:01:18.376175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:18.376205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:18.376232Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:18.376306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:01:18.376313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-07-08T12:01:18.376345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:01:18.376357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:01:18.377255Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:18.377267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:18.377335Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:18.377342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:01:18.377353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:18.377359Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:01:18.377372Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:01:18.377376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:18.377381Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:01:18.377384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:18.377388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:01:18.377393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:18.377398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:01:18.377402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:01:18.377414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:01:18.377420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:01:18.377425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:01:18.377906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:01:18.377924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
de 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 100:0 2025-07-08T12:01:18.384857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 100:0, viewDescription: Name: "MyView" QueryText: "Some query" 2025-07-08T12:01:18.384883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: MyView, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-07-08T12:01:18.384905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-07-08T12:01:18.384914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:01:18.385223Z node 1 :TX_PROXY DEBUG: actor# [1:271:2262] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-07-08T12:01:18.385990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusAccepted TxId: 100 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-07-08T12:01:18.386043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/MyView 2025-07-08T12:01:18.386107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-07-08T12:01:18.386117Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 100:0 ProgressState 2025-07-08T12:01:18.386127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2025-07-08T12:01:18.386155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:01:18.386241Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-07-08T12:01:18.386834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-07-08T12:01:18.386876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-07-08T12:01:18.386955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:18.386978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:18.386986Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] 
TCreateView::TPropose, opId: 100:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2025-07-08T12:01:18.387015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-07-08T12:01:18.387050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:01:18.387061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 100 2025-07-08T12:01:18.387539Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:18.387546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:18.387583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-07-08T12:01:18.387600Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:18.387604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-07-08T12:01:18.387608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-07-08T12:01:18.387696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-07-08T12:01:18.387703Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-07-08T12:01:18.387711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-07-08T12:01:18.387714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-07-08T12:01:18.387718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-07-08T12:01:18.387720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-07-08T12:01:18.387723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-07-08T12:01:18.387727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-07-08T12:01:18.387730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-07-08T12:01:18.387732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-07-08T12:01:18.387742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-07-08T12:01:18.387746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-07-08T12:01:18.387753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-07-08T12:01:18.387755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-07-08T12:01:18.387822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 
PathOwnerId: 72057594046678944, cookie: 100 2025-07-08T12:01:18.387829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 100 2025-07-08T12:01:18.387832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-07-08T12:01:18.387836Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-07-08T12:01:18.387839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:01:18.387935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-07-08T12:01:18.387944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-07-08T12:01:18.387947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-07-08T12:01:18.387951Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-07-08T12:01:18.387954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-07-08T12:01:18.387962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-07-08T12:01:18.388994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-07-08T12:01:18.389081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 101 2025-07-08T12:01:18.389131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-07-08T12:01:18.389137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-07-08T12:01:18.389210Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-07-08T12:01:18.389223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-07-08T12:01:18.389226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:301:2292] TestWaitNotification: OK eventTxId 101 2025-07-08T12:01:18.389276Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:01:18.389303Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 36us result status StatusSuccess 2025-07-08T12:01:18.389384Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: "MyView" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::Like [GOOD] Test command err: Trying to start YDB, gRPC: 21178, MsgBus: 25646 2025-07-08T12:01:15.874290Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679947614911472:2145];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:15.874440Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0010ec/r3tmp/tmpd9fjsa/pdisk_1.dat 2025-07-08T12:01:15.939112Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21178, node 1 2025-07-08T12:01:15.958545Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:15.958561Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:15.958563Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:15.958615Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:01:15.969327Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:15.969357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:15.970476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25646 TClient is connected to server localhost:25646 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:16.011251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:16.017505Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:16.026167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:16.093916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:16.119177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:16.181872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.275850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.289633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.301194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.317716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.327617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.343578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.358760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:16.542075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.660600Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7524679951909881410:2468] TxId: 281474976715673. Ctx: { TraceId: 01jzmyjs9v83m57p13r0b00b0t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjU5Y2RhOGUtMTRhMzkxOTAtYmMzY2VlOWUtOTg2MTI0Yzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Can not find default state storage group for database /Root 2025-07-08T12:01:16.667347Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976076697, txId: 281474976715672] shutting down Trying to start YDB, gRPC: 21926, MsgBus: 27956 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0010ec/r3tmp/tmpPuwAQw/pdisk_1.dat 2025-07-08T12:01:17.023668Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:01:17.034075Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21926, node 2 2025-07-08T12:01:17.045502Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:17.045516Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:17.045518Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:17.045563Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27956 TClient is connected to server localhost:27956 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:17.129366Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:17.129390Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:17.129834Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.130439Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-07-08T12:01:17.133212Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:17.424417Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.555110Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976077593, txId: 281474976715660] shutting down [[[2];[1000];["Dogecoin"]];[[4];[1];["XTC"]];[[5];[2];["Cardano"]];[[6];[3];["Tether"]]] Trying to start YDB, gRPC: 22003, MsgBus: 21625 2025-07-08T12:01:17.745873Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7524679957815456370:2061];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:17.745893Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0010ec/r3tmp/tmpsG0ygS/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22003, node 3 2025-07-08T12:01:17.767004Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:17.767415Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:17.767421Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:17.767423Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:17.767463Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21625 TClient is connected to server localhost:21625 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T12:01:17.849402Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:17.849441Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:17.849803Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.850301Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-07-08T12:01:17.866058Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.878513Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:17.896737Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:17.918616Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:18.089276Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.098155Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.105331Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.120237Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.133649Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.148295Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.163584Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.316500Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::UdfFailure [GOOD] Test command err: Trying to start YDB, gRPC: 5387, MsgBus: 25636 2025-07-08T12:01:15.244220Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679945627507730:2064];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:15.244279Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001118/r3tmp/tmpQxo2qZ/pdisk_1.dat 2025-07-08T12:01:15.344464Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 
5387, node 1 2025-07-08T12:01:15.365132Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:15.365145Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:15.365148Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:15.365189Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25636 TClient is connected to server localhost:25636 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-07-08T12:01:15.416126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:15.416150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:15.417244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:15.423133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:15.424873Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:15.431460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:15.451322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:15.476242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T12:01:15.489494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:15.634640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:15.643706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:15.658555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:15.671074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:15.685371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:15.700309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:15.769940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.050348Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7524679949922477554:2446] TxId: 281474976715671. Ctx: { TraceId: 01jzmyjrnfcw2hw5vsfa8y7cvt, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTlkYTQ1NWYtMTUyMzNiNzktOGQ0NzFiZjYtMmY2NDIzNjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Can not find default state storage group for database /Root 2025-07-08T12:01:16.258063Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:01:16.266985Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7524679949922477665:2446] TxId: 281474976715672. Ctx: { TraceId: 01jzmyjrnfcw2hw5vsfa8y7cvt, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTlkYTQ1NWYtMTUyMzNiNzktOGQ0NzFiZjYtMmY2NDIzNjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Can not find default state storage group for database /Root 2025-07-08T12:01:16.268324Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976076095, txId: 281474976715670] shutting down Trying to start YDB, gRPC: 12410, MsgBus: 29465 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001118/r3tmp/tmpEHt5Qm/pdisk_1.dat 2025-07-08T12:01:16.658432Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:01:16.689194Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12410, node 2 2025-07-08T12:01:16.707747Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:16.707759Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:16.707761Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:16.707809Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29465 2025-07-08T12:01:16.743747Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:16.743788Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:16.744886Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29465 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:16.790864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:16.792513Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:16.801891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:16.871608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:16.936416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:16.955119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.096256Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.107410Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.119777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.133501Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.147648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.163464Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.175258Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.369775Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7524679957974126269:2450], TxId: 281474976715671, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NGJjMDY2ZjYtOGExY2YyNTYtMjY5MmM5N2MtNWY4YjYxNWU=. TraceId : 01jzmyjt0r4csd755nsxmm57h0. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate was called, reason(17): Bad filter value. }. 2025-07-08T12:01:17.370079Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGJjMDY2ZjYtOGExY2YyNTYtMjY5MmM5N2MtNWY4YjYxNWU=, ActorId: [2:7524679957974126240:2446], ActorState: ExecuteState, TraceId: 01jzmyjt0r4csd755nsxmm57h0, Create QueryResponse for error on request, msg: 2025-07-08T12:01:17.370220Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976077411, txId: 281474976715670] shutting down 2025-07-08T12:01:17.370251Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7524679957974126270:2451], TxId: 281474976715671, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NGJjMDY2ZjYtOGExY2YyNTYtMjY5MmM5N2MtNWY4YjYxNWU=. TraceId : 01jzmyjt0r4csd755nsxmm57h0. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : . }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. Trying to start YDB, gRPC: 25341, MsgBus: 17567 2025-07-08T12:01:17.723310Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7524679956716600269:2141];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:17.723406Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001118/r3tmp/tmpSmDSsc/pdisk_1.dat 2025-07-08T12:01:17.737645Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25341, node 3 2025-07-08T12:01:17.749466Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:17.749483Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:17.749485Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:17.749522Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17567 TClient is connected to server localhost:17567 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:17.828970Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:17.828997Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:17.829341Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.830391Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:17.831337Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:17.837688Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-07-08T12:01:17.849750Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.869235Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:17.879701Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:18.083608Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.092395Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.148261Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.163100Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.175702Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.191850Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.204579Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.399082Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7524679961011569965:2450], TxId: 281474976715671, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=YWEzZDMyOTktZGQzNzYyZGMtZDliOTRjZTAtMzJlOTgwOTg=. TraceId : 01jzmyjv17ahw8gb176tekq2vq. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate was called, reason(17): Bad filter value. }. 2025-07-08T12:01:18.399180Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7524679961011569966:2451], TxId: 281474976715671, task: 2. Ctx: { TraceId : 01jzmyjv17ahw8gb176tekq2vq. SessionId : ydb://session/3?node_id=3&id=YWEzZDMyOTktZGQzNzYyZGMtZDliOTRjZTAtMzJlOTgwOTg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2025-07-08T12:01:18.399196Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YWEzZDMyOTktZGQzNzYyZGMtZDliOTRjZTAtMzJlOTgwOTg=, ActorId: [3:7524679961011569935:2446], ActorState: ExecuteState, TraceId: 01jzmyjv17ahw8gb176tekq2vq, Create QueryResponse for error on request, msg: 2025-07-08T12:01:18.399291Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976078440, txId: 281474976715670] shutting down >> TSchemeShardViewTest::AsyncDropSameView >> TSchemeShardViewTest::DropView [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::E2EBackupCollection [GOOD] Test command err: 2025-07-08T12:01:10.028091Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000c10/r3tmp/tmpeLqbdN/pdisk_1.dat 2025-07-08T12:01:10.156003Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:548:2472], Recipient [1:361:2356]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:01:10.156032Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:01:10.156037Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-07-08T12:01:10.156055Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:545:2470], Recipient [1:361:2356]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-07-08T12:01:10.156058Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-07-08T12:01:10.177328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-07-08T12:01:10.177399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:01:10.177821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-07-08T12:01:10.177883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-07-08T12:01:10.177908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:01:10.177926Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-07-08T12:01:10.178109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-07-08T12:01:10.178139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-07-08T12:01:10.178145Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-07-08T12:01:10.178149Z node 1 
:FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-07-08T12:01:10.178197Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:361:2356], Recipient [1:361:2356]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-07-08T12:01:10.178204Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-07-08T12:01:10.178220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:01:10.178228Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-07-08T12:01:10.178232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:01:10.178237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:01:10.178255Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-07-08T12:01:10.178320Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-07-08T12:01:10.178326Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-07-08T12:01:10.178343Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:361:2356], Recipient [1:361:2356]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-07-08T12:01:10.178347Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-07-08T12:01:10.178353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:01:10.178361Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-07-08T12:01:10.178365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:01:10.178373Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-07-08T12:01:10.178421Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-07-08T12:01:10.178425Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-07-08T12:01:10.178438Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:361:2356], Recipient [1:361:2356]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-07-08T12:01:10.178442Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-07-08T12:01:10.178446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:01:10.178449Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:01:10.178454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-07-08T12:01:10.178457Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-07-08T12:01:10.178467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:01:10.179132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 
0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:01:10.179245Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-07-08T12:01:10.179255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:01:10.179293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-07-08T12:01:10.179520Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:553:2477], Recipient [1:361:2356]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:555:2478] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-07-08T12:01:10.179530Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-07-08T12:01:10.179536Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-07-08T12:01:10.179569Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:357:2352], Recipient [1:361:2356]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-07-08T12:01:10.179638Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:557:2480], Recipient [1:361:2356]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:01:10.179642Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:01:10.179646Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-07-08T12:01:10.179663Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:545:2470], Recipient [1:361:2356]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-07-08T12:01:10.179667Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-07-08T12:01:10.179680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-07-08T12:01:10.179684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-07-08T12:01:10.179689Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-07-08T12:01:10.195518Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 273285138, Sender [1:44:2091], Recipient [1:361:2356]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-07-08T12:01:10.195548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-07-08T12:01:10.195554Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:10.195616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:01:10.195629Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } 2025-07-08T12:01:10.231909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:10.231943Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:10.244331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:10.329149Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:357:2352], Recipient [1:361:2356]: NKikimrTx.TEvProposeTransactionStatus Status: 17 StepId: 500 TxId: 1 2025-07-08T12:01:10.329339Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:573:2493], Recipient [1:361:2356]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:01:10.329349Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:01:10.329354Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-07-08T12:01:10.329379Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [1:522:2449], Recipient [1:361:2356]: {TEvPlanStep step# 500 MediatorId# 72057594046382081 TabletID 72057594046644480} 2025-07-08T12:01:10.329384Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-07-08T12:01:10.329396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-07-08T12:01:10.329430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-07-08T12:01:10.329441Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-07-08T12:01:10.329517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:01:10.329527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-07-08T12:01:10.329556Z node 1 :FLAT_TX ... 
ARD INFO: 72075186224037892 Sending notify to schemeshard 72057594046644480 txId 281474976715666 state Ready TxInFly 0 2025-07-08T12:01:18.117968Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-07-08T12:01:18.118074Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [3:1379:3092], Recipient [3:361:2356]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:01:18.118083Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:01:18.118088Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-07-08T12:01:18.118139Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [3:1052:2827], Recipient [3:361:2356]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 1052 RawX2: 12884904715 } Origin: 72075186224037892 State: 2 TxId: 281474976715666 Step: 0 Generation: 1 2025-07-08T12:01:18.118146Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-07-08T12:01:18.118157Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 1052 RawX2: 12884904715 } Origin: 72075186224037892 State: 2 TxId: 281474976715666 Step: 0 Generation: 1 2025-07-08T12:01:18.118166Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715666, tablet: 72075186224037892, partId: 1 2025-07-08T12:01:18.118195Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715666:1, at schemeshard: 72057594046644480, message: Source { RawX1: 1052 RawX2: 12884904715 } Origin: 72075186224037892 State: 2 TxId: 281474976715666 Step: 0 Generation: 1 2025-07-08T12:01:18.118206Z node 3 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715666:1 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046644480 2025-07-08T12:01:18.118215Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715666:1 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 1052 RawX2: 12884904715 } Origin: 72075186224037892 State: 2 TxId: 281474976715666 Step: 0 Generation: 1 2025-07-08T12:01:18.118232Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715666:1, shardIdx: 72057594046644480:5, shard: 72075186224037892, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-07-08T12:01:18.118236Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715666:1, at schemeshard: 72057594046644480 2025-07-08T12:01:18.118241Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715666:1, datashard: 72075186224037892, at schemeshard: 72057594046644480 2025-07-08T12:01:18.118248Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715666:1 129 -> 240 2025-07-08T12:01:18.118302Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TRestoreMultipleIncrementalBackups TDone, operationId: 281474976715666:1 Constructed op# SrcTablePaths: "/Root/.backups/collections/MyCollection/19700101000001Z_incremental/Table" DstTablePath: "/Root/Table" SrcPathIds { OwnerId: 72057594046644480 LocalId: 11 } 2025-07-08T12:01:18.118318Z node 3 :FLAT_TX_SCHEMESHARD TRACE: 
TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-07-08T12:01:18.118494Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715666:1, at schemeshard: 72057594046644480 2025-07-08T12:01:18.118500Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-07-08T12:01:18.118505Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715666:1 2025-07-08T12:01:18.118518Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:1052:2827] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715666 at schemeshard: 72057594046644480 2025-07-08T12:01:18.118543Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715666 datashard 72075186224037892 state Ready 2025-07-08T12:01:18.118551Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 Got TEvSchemaChangedResult from SS at 72075186224037892 2025-07-08T12:01:18.118600Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [3:361:2356], Recipient [3:361:2356]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-07-08T12:01:18.118607Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-07-08T12:01:18.118614Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715666:1, at schemeshard: 72057594046644480 2025-07-08T12:01:18.118621Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TRestoreMultipleIncrementalBackups TDone, operationId: 281474976715666:1 ProgressState 2025-07-08T12:01:18.118632Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-07-08T12:01:18.118637Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715666:1 progress is 1/2 2025-07-08T12:01:18.118641Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715666 ready parts: 1/2 2025-07-08T12:01:18.118647Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715666, done: 1, blocked: 1 2025-07-08T12:01:18.118661Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 281474976715666:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 281474976715666 Name: CopyTableBarrier }, at tablet# 72057594046644480 2025-07-08T12:01:18.118666Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715666:0 240 -> 240 2025-07-08T12:01:18.118685Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715666:1 progress is 1/2 2025-07-08T12:01:18.118691Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715666 ready parts: 1/2 2025-07-08T12:01:18.118697Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715666, ready parts: 1/2, is published: true 2025-07-08T12:01:18.118760Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-07-08T12:01:18.118765Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715666:0 2025-07-08T12:01:18.118780Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [3:361:2356], Recipient [3:361:2356]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-07-08T12:01:18.118784Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-07-08T12:01:18.118789Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 
281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.118793Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715666:0 ProgressState 2025-07-08T12:01:18.118802Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-07-08T12:01:18.118805Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715666:0 progress is 2/2 2025-07-08T12:01:18.118808Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715666 ready parts: 2/2 2025-07-08T12:01:18.118812Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715666:0 progress is 2/2 2025-07-08T12:01:18.118815Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715666 ready parts: 2/2 2025-07-08T12:01:18.118819Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715666, ready parts: 2/2, is published: true 2025-07-08T12:01:18.118828Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:1245:2980] message: TxId: 281474976715666 2025-07-08T12:01:18.118833Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715666 ready parts: 2/2 2025-07-08T12:01:18.118840Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715666:0 2025-07-08T12:01:18.118844Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715666:0 2025-07-08T12:01:18.118875Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 12] was 4 2025-07-08T12:01:18.118879Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 8] was 3 2025-07-08T12:01:18.118885Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715666:1 2025-07-08T12:01:18.118888Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715666:1 2025-07-08T12:01:18.118895Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 11] was 3 2025-07-08T12:01:18.118899Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 12] was 3 2025-07-08T12:01:18.118969Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-07-08T12:01:18.118978Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:1245:2980] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715666 at schemeshard: 72057594046644480 2025-07-08T12:01:18.119129Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [3:1253:2987], Recipient [3:361:2356]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-07-08T12:01:18.119134Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-07-08T12:01:18.119140Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-07-08T12:01:18.130307Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [3:1379:3092], Recipient [3:361:2356]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-07-08T12:01:18.130333Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 
2025-07-08T12:01:18.130339Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-07-08T12:01:18.269137Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:361:2356]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-07-08T12:01:18.269174Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-07-08T12:01:18.269194Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:361:2356], Recipient [3:361:2356]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-07-08T12:01:18.269199Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-07-08T12:01:18.422673Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jzmyjv25ajkmbw7g2qngmf4p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDBmNjQyOGMtNzVkNjFjYjEtNzI1NzYwNS1jMGIzY2NlNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root { items { uint32_value: 2 } items { uint32_value: 200 } }, { items { uint32_value: 3 } items { uint32_value: 30 } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 [GOOD] Test command err: 2025-07-08T12:01:09.711228Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:107:2139], Recipient [1:123:2149]: NKikimr::TEvTablet::TEvBoot 2025-07-08T12:01:09.713729Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:107:2139], Recipient [1:123:2149]: NKikimr::TEvTablet::TEvRestored 2025-07-08T12:01:09.713873Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:123:2149] 2025-07-08T12:01:09.713922Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:01:09.743628Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:107:2139], Recipient [1:123:2149]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T12:01:09.756623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:01:09.756645Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:09.758495Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:01:09.758543Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:01:09.758675Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-07-08T12:01:09.758684Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-07-08T12:01:09.758690Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-07-08T12:01:09.758737Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:01:09.758775Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:01:09.758785Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:192:2149] in generation 2 2025-07-08T12:01:09.789741Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:01:09.799281Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-07-08T12:01:09.799361Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:01:09.799385Z node 1 :TX_DATASHARD DEBUG: Change sender created: 
at tablet: 9437184, actorId: [1:218:2216] 2025-07-08T12:01:09.799391Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-07-08T12:01:09.799396Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-07-08T12:01:09.799401Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-07-08T12:01:09.799448Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:123:2149], Recipient [1:123:2149]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-07-08T12:01:09.799454Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-07-08T12:01:09.799520Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-07-08T12:01:09.799555Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-07-08T12:01:09.799578Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-07-08T12:01:09.799584Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:01:09.799591Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-07-08T12:01:09.799596Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-07-08T12:01:09.799599Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-07-08T12:01:09.799604Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-07-08T12:01:09.799609Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-07-08T12:01:09.799620Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:214:2213], Recipient [1:123:2149]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:01:09.799625Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:01:09.799631Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:212:2212], serverId# [1:214:2213], sessionId# [0:0:0] 2025-07-08T12:01:09.800043Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:100:2135], Recipient [1:123:2149]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 100 RawX2: 4294969431 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-07-08T12:01:09.800053Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-07-08T12:01:09.800064Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-07-08T12:01:09.800093Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-07-08T12:01:09.800103Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-07-08T12:01:09.800112Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-07-08T12:01:09.800119Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-07-08T12:01:09.800123Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-07-08T12:01:09.800128Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 
2025-07-08T12:01:09.800132Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-07-08T12:01:09.800187Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-07-08T12:01:09.800194Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-07-08T12:01:09.800198Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-07-08T12:01:09.800201Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-07-08T12:01:09.800210Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-07-08T12:01:09.800213Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-07-08T12:01:09.800217Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-07-08T12:01:09.800220Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-07-08T12:01:09.800225Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-07-08T12:01:09.821537Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-07-08T12:01:09.821567Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-07-08T12:01:09.821585Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-07-08T12:01:09.821595Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-07-08T12:01:09.821609Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-07-08T12:01:09.821751Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:224:2222], Recipient [1:123:2149]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:01:09.821760Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:01:09.821770Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:223:2221], serverId# [1:224:2222], sessionId# [0:0:0] 2025-07-08T12:01:09.821780Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:100:2135], Recipient [1:123:2149]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-07-08T12:01:09.821784Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-07-08T12:01:09.821820Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-07-08T12:01:09.821828Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-07-08T12:01:09.821833Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-07-08T12:01:09.821838Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-07-08T12:01:09.822560Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 100 RawX2: 4294969431 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-07-08T12:01:09.822582Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-07-08T12:01:09.822644Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:123:2149], 
Recipient [1:123:2149]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-07-08T12:01:09.822651Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-07-08T12:01:09.822659Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-07-08T12:01:09.822667Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-07-08T12:01:09.822672Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-07-08T12:01:09.822679Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-07-08T12:01:09.822684Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-07-08T12:01:09.822693Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-07-08T12:01:09.822698Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-07-08T12:01:09.822702Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-07-08T12:01:09.822705Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-07-08T12:01:09.822750Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-07-08T12:01:09.822756Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-07-08T12:01:09.822759Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-07-08T12:01:09.822763Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-07-08T12:01:09.822766Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-07-08T12:01:09.822778Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-07-08T12:01:09.822781Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-07-08T12:01:09.822784Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-07-08T12:01:09.822788Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-07-08T12:01:09.822799Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-07-08T12:01:09.822802Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-07-08T12:01:09.822805Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-07-08T12:01:09.822811Z node 1 : ... 
ations at 9437184 2025-07-08T12:01:18.462801Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-07-08T12:01:18.462846Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:506] at 9437184 on unit CompleteOperation 2025-07-08T12:01:18.462872Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 506] from 9437184 at tablet 9437184 send result to client [1:100:2135], exec latency: 2 ms, propose latency: 4 ms 2025-07-08T12:01:18.462891Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 506 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-07-08T12:01:18.462900Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-07-08T12:01:18.462910Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-07-08T12:01:18.462972Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-07-08T12:01:18.462980Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:507] at 9437184 on unit CompleteOperation 2025-07-08T12:01:18.462986Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 507] from 9437184 at tablet 9437184 send result to client [1:100:2135], exec latency: 0 ms, propose latency: 1 ms 2025-07-08T12:01:18.462992Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 507 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-07-08T12:01:18.462997Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-07-08T12:01:18.463001Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-07-08T12:01:18.463005Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-07-08T12:01:18.463029Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-07-08T12:01:18.463033Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:508] at 9437184 on unit CompleteOperation 2025-07-08T12:01:18.463041Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 508] from 9437184 at tablet 9437184 send result to client [1:100:2135], exec latency: 0 ms, propose latency: 1 ms 2025-07-08T12:01:18.463047Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 508 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-07-08T12:01:18.463051Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-07-08T12:01:18.463055Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-07-08T12:01:18.463059Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-07-08T12:01:18.463063Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-07-08T12:01:18.463082Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-07-08T12:01:18.463087Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:509] at 9437184 on unit CompleteOperation 2025-07-08T12:01:18.463093Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 509] from 9437184 at tablet 9437184 send result to client [1:100:2135], exec latency: 0 ms, propose latency: 1 ms 2025-07-08T12:01:18.463099Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 509 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-07-08T12:01:18.463103Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 
2025-07-08T12:01:18.463107Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-07-08T12:01:18.463125Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-07-08T12:01:18.463130Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:510] at 9437184 on unit CompleteOperation 2025-07-08T12:01:18.463135Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 510] from 9437184 at tablet 9437184 send result to client [1:100:2135], exec latency: 0 ms, propose latency: 1 ms 2025-07-08T12:01:18.463141Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 510 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-07-08T12:01:18.463145Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-07-08T12:01:18.463162Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-07-08T12:01:18.463166Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:511] at 9437184 on unit CompleteOperation 2025-07-08T12:01:18.463172Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 511] from 9437184 at tablet 9437184 send result to client [1:100:2135], exec latency: 0 ms, propose latency: 1 ms 2025-07-08T12:01:18.463178Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-07-08T12:01:18.463184Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-07-08T12:01:18.463206Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-07-08T12:01:18.463210Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:512] at 9437184 on unit CompleteOperation 2025-07-08T12:01:18.463217Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 512] from 9437184 at tablet 9437184 send result to client [1:100:2135], exec latency: 0 ms, propose latency: 1 ms 2025-07-08T12:01:18.463222Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-07-08T12:01:18.463227Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-07-08T12:01:18.463231Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-07-08T12:01:18.463236Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:516] at 9437184 on unit FinishPropose 2025-07-08T12:01:18.463245Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 516 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2025-07-08T12:01:18.463271Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-07-08T12:01:18.463309Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-07-08T12:01:18.463315Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:514] at 9437184 on unit CompleteOperation 2025-07-08T12:01:18.463321Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 514] from 9437184 at tablet 9437184 send result to client [1:100:2135], exec latency: 2 ms, propose latency: 4 ms 2025-07-08T12:01:18.463327Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-07-08T12:01:18.463331Z node 1 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-07-08T12:01:18.463347Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-07-08T12:01:18.463352Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:515] at 9437184 on unit CompleteOperation 2025-07-08T12:01:18.463358Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 515] from 9437184 at tablet 9437184 send result to client [1:100:2135], exec latency: 0 ms, propose latency: 1 ms 2025-07-08T12:01:18.463363Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-07-08T12:01:18.463403Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:238:2231], Recipient [1:350:2317]: {TEvReadSet step# 1000005 txid# 506 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-07-08T12:01:18.463410Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-07-08T12:01:18.463418Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 506 2025-07-08T12:01:18.463580Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:238:2231], Recipient [1:350:2317]: {TEvReadSet step# 1000005 txid# 507 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-07-08T12:01:18.463588Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-07-08T12:01:18.463592Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 507 2025-07-08T12:01:18.463620Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:238:2231], Recipient [1:350:2317]: {TEvReadSet step# 1000005 txid# 508 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-07-08T12:01:18.463626Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-07-08T12:01:18.463629Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 508 2025-07-08T12:01:18.463644Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:238:2231], Recipient [1:350:2317]: {TEvReadSet step# 1000005 txid# 509 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-07-08T12:01:18.463647Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-07-08T12:01:18.463651Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 509 2025-07-08T12:01:18.463664Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:238:2231], Recipient [1:350:2317]: {TEvReadSet step# 1000005 txid# 510 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-07-08T12:01:18.463667Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-07-08T12:01:18.463670Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 510 2025-07-08T12:01:18.463685Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:238:2231], Recipient [1:350:2317]: {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-07-08T12:01:18.463688Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 
2025-07-08T12:01:18.463691Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 511 2025-07-08T12:01:18.463705Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:238:2231], Recipient [1:350:2317]: {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-07-08T12:01:18.463708Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-07-08T12:01:18.463711Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 512 2025-07-08T12:01:18.463726Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:238:2231], Recipient [1:350:2317]: {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-07-08T12:01:18.463730Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-07-08T12:01:18.463733Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 514 expect 5 6 - 6 6 7 - - - - - - - - - - - - - - - - - - - - - - - - - - actual 5 6 - 6 6 7 - - - - - - - - - - - - - - - - - - - - - - - - - - interm 5 6 - 6 6 - - - - - - - - - - - - - - - - - - - - - - - - - - - >> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD] >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata [GOOD] >> KqpImmediateEffects::MultiShardUpsertAfterRead [GOOD] >> KikimrIcGateway::TestSecretsExistingValidation |69.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::DropRedundantSortByPk [GOOD] Test command err: Trying to start YDB, gRPC: 6424, MsgBus: 13670 2025-07-08T12:01:12.436222Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679933508852103:2222];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:12.436343Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001146/r3tmp/tmpQbu1le/pdisk_1.dat 2025-07-08T12:01:12.551812Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6424, node 1 2025-07-08T12:01:12.585184Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:12.585195Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:12.585198Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:12.585241Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13670 2025-07-08T12:01:12.596684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:12.596709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:12.598871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13670 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:12.651796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.673526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.697210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.723261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:12.740499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:12.849551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.858669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.870759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.884774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.897881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.912448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.927051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:13.425434Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; [[[3];[300u];["Value4"]];[[0];[0u];["Value1"]]] 2025-07-08T12:01:16.302631Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976076340, txId: 281474976715770] shutting down Trying to start YDB, gRPC: 3523, MsgBus: 7485 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001146/r3tmp/tmp7l34kA/pdisk_1.dat 2025-07-08T12:01:16.669760Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:01:16.683276Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3523, node 2 2025-07-08T12:01:16.701977Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:16.701991Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:16.701993Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:16.702037Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7485 2025-07-08T12:01:16.769236Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:16.769267Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:16.773208Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7485 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T12:01:16.806844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.808406Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:16.820279Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:16.851651Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:16.878371Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.891357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:17.093795Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.114365Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.124801Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.138870Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.149612Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.164772Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.225346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.453690Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7524679954537576136:2447], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
:5:13: Error: Scan query cannot have data modifications., code: 2029
: Error: Execution, code: 1060
:5:13: Error: Scan query cannot have data modifications., code: 2029 2025-07-08T12:01:17.454483Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Mzk1MzViYy02NjFkMzAyNi05NmJlNTc4Zi03Mzk1NTgwMQ==, ActorId: [2:7524679954537576134:2446], ActorState: ExecuteState, TraceId: 01jzmyjt43fky4ww3ma53qm8j0, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id:
: Error: Execution, code: 1060
:5:13: Error: Scan query cannot have data modifications., code: 2029
: Error: Execution, code: 1060
:5:13: Error: Scan query cannot have data modifications., code: 2029 Trying to start YDB, gRPC: 62728, MsgBus: 16653 2025-07-08T12:01:17.850246Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7524679955454229050:2146];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:17.850401Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001146/r3tmp/tmp7Etf4d/pdisk_1.dat 2025-07-08T12:01:17.877891Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62728, node 3 2025-07-08T12:01:17.887422Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:17.887435Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:17.887438Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:17.887484Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16653 TClient is connected to server localhost:16653 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:17.944671Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:17.944705Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:17.945683Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:17.951720Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.966793Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:17.989641Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:18.018050Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.040548Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:18.295656Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.307787Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.327966Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.337552Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.352557Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.366145Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.378835Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.845516Z node 3 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KqpScan::LimitOverSecondaryIndexRead [GOOD] >> KqpWrite::Insert >> KqpScan::AggregateEmptySum [GOOD] >> KqpEffects::AlterAfterUpsertTransaction+UseSink >> TSchemeShardViewTest::AsyncDropSameView [GOOD] >> KqpImmediateEffects::Replace ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::DropView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T12:01:19.129732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:01:19.129754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:01:19.129760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:01:19.129764Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:01:19.129777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:01:19.129780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:01:19.129796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:01:19.129824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:01:19.129901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:01:19.140945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:01:19.140980Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:19.144258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:01:19.144302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:01:19.144323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:01:19.145796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:01:19.145858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:01:19.145964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:19.146126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:01:19.146915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:19.146952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:01:19.147165Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:19.147174Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:19.147190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:01:19.147197Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:19.147203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:01:19.147228Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:01:19.148351Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T12:01:19.166467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:01:19.166533Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:19.166582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:01:19.166614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:01:19.166621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:19.167230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:19.167250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:01:19.167286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:19.167292Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:01:19.167296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:01:19.167299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:01:19.167622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:19.167630Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:01:19.167633Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:01:19.167902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:19.167910Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:19.167914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:19.167919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:01:19.168331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:01:19.172259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:01:19.172301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:01:19.172441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, 
stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:19.172462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:19.172472Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:19.172528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:01:19.172534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:19.172560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:01:19.172568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:01:19.172963Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:19.172973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:19.173009Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:19.173013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:01:19.173021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:19.173025Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:01:19.173034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:01:19.173037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:19.173040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:01:19.173042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:19.173045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:01:19.173049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:19.173052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:01:19.173054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:01:19.173063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:01:19.173068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:01:19.173070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:01:19.173361Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:01:19.173371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... scription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-07-08T12:01:19.185405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropView Drop { Name: "MyView" } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:01:19.185446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TDropView Propose, opId: 102:0, path: /MyRoot/MyView 2025-07-08T12:01:19.185469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-07-08T12:01:19.185475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:01:19.186071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusAccepted TxId: 102 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-07-08T12:01:19.186106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAccepted, operation: DROP VIEW, path: /MyRoot/MyView 2025-07-08T12:01:19.186134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-07-08T12:01:19.186142Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropView TPropose, opId: 102:0 ProgressState 2025-07-08T12:01:19.186150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-07-08T12:01:19.186172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:01:19.186598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-07-08T12:01:19.186631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-07-08T12:01:19.186700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:19.186721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 
72057594046678944 2025-07-08T12:01:19.186731Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropView TPropose, opId: 102:0 HandleReply TEvOperationPlan, step: 5000003 2025-07-08T12:01:19.186760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-07-08T12:01:19.186798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:01:19.186808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-07-08T12:01:19.187299Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:19.187313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:19.187342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-07-08T12:01:19.187368Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:19.187390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-07-08T12:01:19.187395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: Erasing txId 102 2025-07-08T12:01:19.187469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-07-08T12:01:19.187477Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-07-08T12:01:19.187488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-07-08T12:01:19.187491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-07-08T12:01:19.187496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-07-08T12:01:19.187499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-07-08T12:01:19.187503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-07-08T12:01:19.187508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-07-08T12:01:19.187512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-07-08T12:01:19.187516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-07-08T12:01:19.187529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-07-08T12:01:19.187534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-07-08T12:01:19.187538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-07-08T12:01:19.187541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-07-08T12:01:19.187658Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T12:01:19.187671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T12:01:19.187680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-07-08T12:01:19.187685Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-07-08T12:01:19.187689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:01:19.187845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T12:01:19.187858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T12:01:19.187862Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-07-08T12:01:19.187867Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-07-08T12:01:19.187871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-07-08T12:01:19.187884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-07-08T12:01:19.188098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T12:01:19.188112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-07-08T12:01:19.188133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:01:19.188770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-07-08T12:01:19.189157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-07-08T12:01:19.189187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-07-08T12:01:19.189236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-07-08T12:01:19.189243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-07-08T12:01:19.189326Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown 
transaction, txId: 102, at schemeshard: 72057594046678944 2025-07-08T12:01:19.189344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-07-08T12:01:19.189349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:325:2316] TestWaitNotification: OK eventTxId 102 2025-07-08T12:01:19.189436Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:01:19.189468Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 53us result status StatusPathDoesNotExist 2025-07-08T12:01:19.189514Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T12:01:19.286741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:01:19.286762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:01:19.286767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:01:19.286772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:01:19.286784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:01:19.286788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:01:19.286805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:01:19.286829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 
604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:01:19.286903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:01:19.298004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:01:19.298028Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:19.305048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:01:19.305125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:01:19.305160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:01:19.306820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:01:19.306867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:01:19.306967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:19.307123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:01:19.308047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:19.308091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:01:19.308338Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:19.308352Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:19.308374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:01:19.308383Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:19.308389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:01:19.308419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:01:19.309898Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T12:01:19.328966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:01:19.329092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:19.329195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:01:19.329275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:01:19.329301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 
2025-07-08T12:01:19.330469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:19.330506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:01:19.330575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:19.330587Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:01:19.330592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:01:19.330597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:01:19.331307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:19.331320Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:01:19.331326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:01:19.331921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:19.331938Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:19.331945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:19.331953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:01:19.332610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:01:19.333137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:01:19.333189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:01:19.333386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:19.333413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:19.333424Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:19.333499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:01:19.333506Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:19.333536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:01:19.333548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:01:19.334050Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:19.334058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:19.334105Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:19.334110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:01:19.334120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:19.334126Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:01:19.334137Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:01:19.334141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:19.334146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:01:19.334149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:19.334153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:01:19.334158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:19.334163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:01:19.334166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:01:19.334177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:01:19.334183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:01:19.334187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:01:19.334591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:01:19.334606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
5-07-08T12:01:19.361681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-07-08T12:01:19.361688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-07-08T12:01:19.361691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-07-08T12:01:19.361895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-07-08T12:01:19.361903Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:19.361908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-07-08T12:01:19.361913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-07-08T12:01:19.362144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T12:01:19.362157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T12:01:19.362161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-07-08T12:01:19.362166Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-07-08T12:01:19.362171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-07-08T12:01:19.365226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T12:01:19.365272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T12:01:19.365280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-07-08T12:01:19.365286Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-07-08T12:01:19.365292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-07-08T12:01:19.365321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-07-08T12:01:19.366154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-07-08T12:01:19.366440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait 
txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 2025-07-08T12:01:19.366514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-07-08T12:01:19.366521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-07-08T12:01:19.366562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-07-08T12:01:19.366565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-07-08T12:01:19.366575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-07-08T12:01:19.366578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-07-08T12:01:19.366669Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-07-08T12:01:19.366694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-07-08T12:01:19.366700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:338:2329] 2025-07-08T12:01:19.366718Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-07-08T12:01:19.366729Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-07-08T12:01:19.366739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-07-08T12:01:19.366742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:338:2329] 2025-07-08T12:01:19.366752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-07-08T12:01:19.366755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:338:2329] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-07-08T12:01:19.366830Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:01:19.366875Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDir" took 62us result status StatusSuccess 2025-07-08T12:01:19.367000Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir" PathDescription { Self { Name: "SomeDir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "FirstView" PathId: 3 
SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "SecondView" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:19.367065Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/FirstView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:01:19.367087Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/FirstView" took 24us result status StatusSuccess 2025-07-08T12:01:19.367136Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/FirstView" PathDescription { Self { Name: "FirstView" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "FirstView" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 QueryText: "First query" CapturedContext { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:19.367178Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/SecondView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:01:19.367191Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/SecondView" took 15us result status StatusSuccess 2025-07-08T12:01:19.367219Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/SecondView" PathDescription { Self { Name: "SecondView" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "SecondView" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 QueryText: "Second query" CapturedContext { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::MultiShardUpsertAfterRead [GOOD] Test command err: Trying to start YDB, gRPC: 14457, MsgBus: 24737 2025-07-08T12:01:16.645191Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679951772562211:2232];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:16.702725Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001c11/r3tmp/tmp0nyT4i/pdisk_1.dat 2025-07-08T12:01:16.734477Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14457, node 1 2025-07-08T12:01:16.758633Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:16.758645Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:16.758647Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:16.758685Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24737 2025-07-08T12:01:16.805996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:16.806023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:16.807116Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24737 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:16.830212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:16.836663Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-07-08T12:01:16.844497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:16.871677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:16.909194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:16.920004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:17.055121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.069364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.134144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.151296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.209029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.225190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.239166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.387098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.398662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.410918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.630986Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Trying to start YDB, gRPC: 22193, MsgBus: 15624 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001c11/r3tmp/tmp5ylrNW/pdisk_1.dat 2025-07-08T12:01:18.484903Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:01:18.492906Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22193, node 2 2025-07-08T12:01:18.515070Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:18.515086Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:18.515087Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:18.515128Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15624 2025-07-08T12:01:18.576507Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:18.576541Z node 2 
:HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:18.577729Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15624 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:18.585057Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:18.589894Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:18.600816Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:18.634007Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:18.692358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-07-08T12:01:18.710927Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.884843Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.952764Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:19.019391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:19.032825Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:19.045282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:19.059771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:19.071783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:19.254943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::LimitOverSecondaryIndexRead [GOOD] Test command err: Trying to start YDB, gRPC: 62123, MsgBus: 26023 2025-07-08T12:01:15.920620Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679945865526806:2179];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:15.920692Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0010f1/r3tmp/tmpaLotmn/pdisk_1.dat 2025-07-08T12:01:15.973604Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62123, node 1 2025-07-08T12:01:16.000885Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:16.000897Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:16.000899Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:16.000941Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26023 TClient is connected to server localhost:26023 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:16.043057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:16.043085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:16.043604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:16.045075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:16.053065Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:16.065431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:16.089117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.117790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:16.145136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:16.329497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.348975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.357104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.371076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.386307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.399256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.412260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.641984Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7524679950160496440:2446] TxId: 281474976715671. Ctx: { TraceId: 01jzmyjsa94rzgap0mvy21hpd4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWU1ODcxNmQtNTgyMDVhODQtYmJjYWU0MjAtNzlmNTJlYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Can not find default state storage group for database /Root 2025-07-08T12:01:16.644829Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976076683, txId: 281474976715670] shutting down Trying to start YDB, gRPC: 63201, MsgBus: 27701 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0010f1/r3tmp/tmpqEuqSj/pdisk_1.dat 2025-07-08T12:01:16.848582Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679953345428350:2241];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:16.854399Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:01:16.859912Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63201, node 2 2025-07-08T12:01:16.877915Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:16.877930Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:16.877933Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:16.877985Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27701 2025-07-08T12:01:16.945087Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:16.945119Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:16.946159Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27701 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:16.957455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:16.964458Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:16.974566Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:16.985912Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:17.008991Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.019815Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:17.234335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.248104Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.274965Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.294233Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.305782Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.326794Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.388548Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.845011Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:01:17.855675Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976077691, txId: 281474976715670] shutting down Trying to start YDB, gRPC: 9424, MsgBus: 27047 2025-07-08T12:01:18.238673Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7524679961121234022:2133];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:18.241567Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0010f1/r3tmp/tmpNGaQmV/pdisk_1.dat 2025-07-08T12:01:18.260206Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9424, node 3 2025-07-08T12:01:18.285107Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:18.285123Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty 
maybe) 2025-07-08T12:01:18.285125Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:18.285178Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27047 TClient is connected to server localhost:27047 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T12:01:18.348028Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:18.348062Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:18.348437Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.349009Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:18.377351Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:18.405909Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.435611Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-07-08T12:01:18.452140Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.650699Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.671142Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.731189Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.753720Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.765961Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.785105Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.797770Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.987492Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:19.000091Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-07-08T12:01:19.017330Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-07-08T12:01:19.240046Z node 3 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; {"Plan":{"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"E-Size":"No estimate","PlanNodeId":4,"LookupKeyColumns":["Key"],"Node Type":"TableLookup","Path":"\/Root\/SecondaryComplexKeys","Columns":["Fk1","Fk2","Key","Value"],"E-Rows":"No estimate","Table":"SecondaryComplexKeys","Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["SecondaryComplexKeys\/Index\/indexImplTable"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"2"},{"Scan":"Parallel","ReadRange":["Fk1 (1)","Fk2 (-∞, +∞)","Key (-∞, +∞)"],"E-Size":"No estimate","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/SecondaryComplexKeys\/Index\/indexImplTable","E-Rows":"No estimate","Table":"SecondaryComplexKeys\/Index\/indexImplTable","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"Limit-TableRangeScan"}],"Node 
Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"2"}],"Node Type":"Limit"}],"PlanNodeType":"Connection","E-Cost":"No estimate"}],"Node Type":"Collect"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/SecondaryComplexKeys","reads":[{"lookup_by":["Key"],"columns":["Fk1","Fk2","Key","Value"],"type":"Lookup"}]},{"name":"\/Root\/SecondaryComplexKeys\/Index\/indexImplTable","reads":[{"lookup_by":["Fk1 (1)"],"columns":["Key"],"scan_by":["Fk2 (-∞, +∞)","Key (-∞, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":5,"Operators":[{"E-Rows":"No estimate","Columns":["Fk1","Fk2","Key","Value"],"E-Size":"No estimate","E-Cost":"No estimate","Name":"TableLookup","Table":"SecondaryComplexKeys","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncDropSameView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:127:2058] recipient: [1:109:2141] 2025-07-08T12:01:19.676496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:01:19.676521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:01:19.676526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:01:19.676531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:01:19.676543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:01:19.676546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:01:19.676559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:01:19.676572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:01:19.676640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:01:19.697608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-07-08T12:01:19.697631Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:19.703483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:01:19.703548Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:01:19.703578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:01:19.705013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:01:19.705061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:01:19.705163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:19.705314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:01:19.706225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:19.706263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:01:19.706477Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:19.706486Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:19.706502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:01:19.706508Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:19.706514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:01:19.706538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:01:19.707729Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:125:2151] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T12:01:19.728026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:01:19.728099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:19.728153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:01:19.728190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:01:19.728200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:19.728876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:19.728897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:01:19.728936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-07-08T12:01:19.728944Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:01:19.728965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:01:19.728970Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:01:19.729291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:19.729300Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:01:19.729304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:01:19.729586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:19.729594Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:19.729599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:19.729605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:01:19.730196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:01:19.730551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:01:19.730583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:01:19.730748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:19.730769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:19.730776Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:19.730840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:01:19.730847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:19.730873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:01:19.730884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: 
Erasing txId 1 2025-07-08T12:01:19.731232Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:19.731239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:19.731280Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:19.731285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:01:19.731293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:19.731298Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:01:19.731309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:01:19.731316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:19.731321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:01:19.731323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:19.731327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:01:19.731332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:19.731336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:01:19.731339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:01:19.731349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:01:19.731354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:01:19.731358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:01:19.731720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:01:19.731738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cooki ... 
LocalPathId: 2], type: EPathTypeView, state: EPathStateDrop), source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_view.cpp:127, operation: DROP VIEW, path: /MyRoot/MyView 2025-07-08T12:01:19.740195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 104, response: Status: StatusMultipleModifications Reason: "Check failed: path: \'/MyRoot/MyView\', error: path is being deleted right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateDrop), source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_view.cpp:127" TxId: 104 SchemeshardId: 72057594046678944 PathId: 2 PathDropTxId: 102, at schemeshard: 72057594046678944 2025-07-08T12:01:19.740209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/MyView', error: path is being deleted right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateDrop), source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_view.cpp:127, operation: DROP VIEW, path: /MyRoot/MyView 2025-07-08T12:01:19.740438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-07-08T12:01:19.740466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-07-08T12:01:19.740526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:19.740544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:19.740552Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropView TPropose, opId: 102:0 HandleReply TEvOperationPlan, step: 5000003 2025-07-08T12:01:19.740577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-07-08T12:01:19.740600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:01:19.740608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 102 2025-07-08T12:01:19.740997Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:19.741005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:19.741028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-07-08T12:01:19.741048Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:19.741053Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-07-08T12:01:19.741057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-07-08T12:01:19.741153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-07-08T12:01:19.741161Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-07-08T12:01:19.741171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-07-08T12:01:19.741175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-07-08T12:01:19.741182Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-07-08T12:01:19.741185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-07-08T12:01:19.741189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-07-08T12:01:19.741193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-07-08T12:01:19.741197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-07-08T12:01:19.741201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-07-08T12:01:19.741212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-07-08T12:01:19.741218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-07-08T12:01:19.741222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-07-08T12:01:19.741226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-07-08T12:01:19.741333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T12:01:19.741345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T12:01:19.741349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-07-08T12:01:19.741354Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-07-08T12:01:19.741358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:01:19.741463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T12:01:19.741473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 
18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-07-08T12:01:19.741476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-07-08T12:01:19.741481Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-07-08T12:01:19.741485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-07-08T12:01:19.741495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-07-08T12:01:19.741560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T12:01:19.741566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-07-08T12:01:19.741574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:01:19.742183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-07-08T12:01:19.742229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-07-08T12:01:19.742456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 2025-07-08T12:01:19.742560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-07-08T12:01:19.742567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 TestWaitNotification wait txId: 104 2025-07-08T12:01:19.742581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-07-08T12:01:19.742584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-07-08T12:01:19.742651Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-07-08T12:01:19.742667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-07-08T12:01:19.742672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:331:2322] 2025-07-08T12:01:19.742690Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-07-08T12:01:19.742700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-07-08T12:01:19.742703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:331:2322] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 
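The txId 102/103/104 sequence above is the schemeshard trace of dropping the same view while an earlier drop is still in flight: txId 102 performs the actual DROP VIEW (state 128 -> 240, path published as deleted), while a later drop of the same view is answered with StatusMultipleModifications (the log shows this for txId 104) because the path is already in EPathStateDrop with PathDropTxId: 102. At the user level the scenario corresponds roughly to the sketch below; the unit test drives the scheme shard directly, so the YQL statements and SDK calls here are only an approximation.

    import ydb

    # Placeholder endpoint; the unit test does not expose a gRPC port for this scenario.
    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
    driver.wait(timeout=5)
    pool = ydb.SessionPool(driver)

    def create_view(session):
        session.execute_scheme(
            "CREATE VIEW MyView WITH (security_invoker = TRUE) AS SELECT 1;"
        )

    def drop_view(session):
        # Only one of several concurrent drops wins; the others are rejected
        # while the path is still being deleted, which is what the
        # StatusMultipleModifications responses above reflect.
        session.execute_scheme("DROP VIEW MyView;")

    pool.retry_operation_sync(create_view)
    pool.retry_operation_sync(drop_view)
    driver.stop()
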
2025-07-08T12:01:19.742764Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:01:19.742789Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 33us result status StatusPathDoesNotExist 2025-07-08T12:01:19.742824Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpEffects::InsertAbort_Params_Conflict-UseSink [GOOD] >> KikimrIcGateway::TestSecretsExistingValidation [GOOD] >> KqpImmediateEffects::UpdateAfterUpsert >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast-UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::AggregateEmptySum [GOOD] Test command err: Trying to start YDB, gRPC: 11913, MsgBus: 4099 2025-07-08T12:01:15.585807Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679947645938832:2066];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:15.585825Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0010f7/r3tmp/tmpLG378R/pdisk_1.dat 2025-07-08T12:01:15.657878Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11913, node 1 2025-07-08T12:01:15.670642Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:15.670657Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:15.670659Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:15.670698Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4099 2025-07-08T12:01:15.691257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:15.691289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:15.693239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4099 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:15.763105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:15.765985Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:15.790217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:15.860689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T12:01:15.891744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:15.918973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:16.014291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.022642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.083352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.094968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.108031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.165521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.184244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.456916Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7524679951940908648:2446] TxId: 281474976715671. Ctx: { TraceId: 01jzmyjs2257ts4hy7xtek0t3q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmYyMzRhNTYtZTNlNmQxODctNGY5NWM3NTctOWViNWEyNTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Can not find default state storage group for database /Root 2025-07-08T12:01:16.955129Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:01:17.062776Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976076501, txId: 281474976715670] shutting down Trying to start YDB, gRPC: 5260, MsgBus: 21006 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0010f7/r3tmp/tmpvIGcFp/pdisk_1.dat 2025-07-08T12:01:17.375243Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:17.375278Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:17.376895Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:17.377300Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5260, node 2 2025-07-08T12:01:17.385615Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:17.385626Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:17.385628Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:17.385674Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21006 TClient is connected to server localhost:21006 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:17.509497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:17.521151Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:17.533957Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:17.560931Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:17.603693Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-07-08T12:01:17.619016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.750229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.762237Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.777726Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.788916Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.800395Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.817725Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.838179Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.427834Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:01:18.581086Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976078202, txId: 281474976715670] shutting down Trying to start YDB, gRPC: 11947, MsgBus: 5356 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0010f7/r3tmp/tmpPqXYUB/pdisk_1.dat 2025-07-08T12:01:19.015484Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:01:19.020198Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11947, node 3 2025-07-08T12:01:19.030951Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:19.030962Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:19.030964Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:19.031012Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5356 TClient is connected to server localhost:5356 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:19.109309Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:19.109344Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:19.109780Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:19.110032Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:19.117447Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:19.130124Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:19.150108Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:19.182246Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:19.208402Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:19.335842Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:19.351290Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:19.364385Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:19.376621Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:19.387178Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:19.402987Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:19.415867Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:19.721664Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976079693, txId: 281474976715670] shutting down >> KqpImmediateEffects::ConflictingKeyR1WR2 >> KqpEffects::InsertAbort_Params_Success >> KqpEffects::UpdateOn_Select >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata [GOOD] >> KikimrIcGateway::TestLoadDataSourceProperties >> KqpInplaceUpdate::SingleRowSimple-UseSink >> KqpEffects::UpdateOn_Literal >> KqpEffects::AlterAfterUpsertTransaction+UseSink [GOOD] >> KqpEffects::AlterAfterUpsertTransaction-UseSink >> KqpImmediateEffects::ReplaceExistingKey >> TUserAttrsTestWithReboots::AllowedSymbolsReboots [GOOD] >> KqpInplaceUpdate::Negative_BatchUpdate+UseSink >> KqpWrite::Insert [GOOD] >> KqpWrite::CastValuesOptional ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestSecretsExistingValidation [GOOD] Test command err: Trying to start YDB, gRPC: 28311, MsgBus: 27117 2025-07-08T12:01:16.489243Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679953849030858:2234];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:16.579262Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000e46/r3tmp/tmpk72aKy/pdisk_1.dat 2025-07-08T12:01:16.628220Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28311, node 1 2025-07-08T12:01:16.665217Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:16.665229Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:16.665232Z node 1 :NET_CLASSIFIER 
WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:16.665271Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27117 2025-07-08T12:01:16.690991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:16.691021Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:16.692133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27117 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:16.759392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:16.762532Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-07-08T12:01:16.779085Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-07-08T12:01:16.974184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.033928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.089958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.099030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20163, MsgBus: 16248 2025-07-08T12:01:17.461228Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679955463752914:2232];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:17.463929Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000e46/r3tmp/tmp4nMGdd/pdisk_1.dat 2025-07-08T12:01:17.477423Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20163, node 2 2025-07-08T12:01:17.494903Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:17.494916Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:17.494918Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:17.494960Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16248 TClient is connected to server localhost:16248 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-07-08T12:01:17.569757Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:17.569797Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:17.570165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.573076Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:17.573545Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:17.585413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.610414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:17.632398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.652335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:17.813165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.822203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.833737Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.854793Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.867422Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.874927Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.888697Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.460693Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:01:18.466677Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:2, at schemeshard: 72057594046644480 2025-07-08T12:01:18.566959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.648779Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:1, at schemeshard: 72057594046644480 2025-07-08T12:01:18.744449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.820396Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.888289Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.963671Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-07-08T12:01:18.977554Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-07-08T12:01:19.310768Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715703:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 61640, MsgBus: 29494 2025-07-08T12:01:19.705753Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7524679966951196909:2193];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:19.707022Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000e46/r3tmp/tmp0cCfQ0/pdisk_1.dat 2025-07-08T12:01:19.729924Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61640, node 3 2025-07-08T12:01:19.748673Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:19.748684Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:19.748686Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:19.748728Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29494 TClient is connected to server localhost:29494 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:19.809502Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:19.809534Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2025-07-08T12:01:19.810031Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:19.811128Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:19.815207Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:19.830838Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:19.850543Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:19.898049Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-07-08T12:01:19.972442Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.053272Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.062079Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.133082Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.192880Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.203543Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.228549Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.253177Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Params_Conflict-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 11310, MsgBus: 19634 2025-07-08T12:01:17.045312Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679954830309786:2151];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:17.046052Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001bfe/r3tmp/tmpjhd2yv/pdisk_1.dat 2025-07-08T12:01:17.125272Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11310, node 1 2025-07-08T12:01:17.144917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:17.144941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:17.157135Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:17.157151Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:17.157153Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:17.157197Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:01:17.157259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19634 TClient is 
connected to server localhost:19634 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:17.251604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:17.255712Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:17.262937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:17.340477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:17.389122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:17.410104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:17.512824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.521472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.578221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.589842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.602715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.616656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.631332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1942, MsgBus: 14696 2025-07-08T12:01:18.238748Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679959191868333:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001bfe/r3tmp/tmpRt7kgh/pdisk_1.dat 2025-07-08T12:01:18.243738Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:01:18.251451Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1942, node 2 2025-07-08T12:01:18.269120Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:18.269135Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:18.269138Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:18.269182Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14696 TClient is connected to server localhost:14696 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:18.341370Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:18.341405Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:18.341873Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.344779Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:18.346681Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:18.350067Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:18.370100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:18.393184Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:18.404736Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:18.593756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.617123Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.641165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.656441Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.679317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.691974Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.750413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.939226Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=3; 2025-07-08T12:01:18.940792Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-07-08T12:01:18.940838Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-07-08T12:01:18.940933Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7524679959191870703:2455], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [2:7524679959191870672:2455]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[2:7524679959191870703:2455].{
: Error: Conflict with existing key., code: 2012 } 2025-07-08T12:01:18.941132Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7524679959191870696:2455], SessionActorId: [2:7524679959191870672:2455], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[2:7524679959191870672:2455]. isRollback=0 2025-07-08T12:01:18.941260Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmIzOTU4NDUtNDFmODQzNTctZDZlZDVhZmYtNzU0OTdjNTk=, ActorId: [2:7524679959191870672:2455], ActorState: ExecuteState, TraceId: 01jzmyjvj975syf6pfqc02441z, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7524679959191870697:2455] from: [2:7524679959191870696:2455] 2025-07-08T12:01:18.941368Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7524679959191870697:2455] TxId: 281474976715670. Ctx: { TraceId: 01jzmyjvj975syf6pfqc02441z, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmIzOTU4NDUtNDFmODQzNTctZDZlZDVhZmYtNzU0OTdjNTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } 2025-07-08T12:01:18.941436Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmIzOTU4NDUtNDFmODQzNTctZDZlZDVhZmYtNzU0OTdjNTk=, ActorId: [2:7524679959191870672:2455], ActorState: ExecuteState, TraceId: 01jzmyjvj975syf6pfqc02441z, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 32063, MsgBus: 24235 2025-07-08T12:01:19.197145Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7524679963511570043:2070];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001bfe/r3tmp/tmpxQEv0H/pdisk_1.dat 2025-07-08T12:01:19.197216Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:01:19.225721Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32063, node 3 2025-07-08T12:01:19.239104Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:19.239119Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:19.239121Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:19.239170Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24235 2025-07-08T12:01:19.298323Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:19.298355Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:19.305494Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24235 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:19.362221Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:19.366339Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:19.440119Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:19.462248Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T12:01:19.493535Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:19.518358Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:19.676762Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:19.688489Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:19.711530Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:19.728966Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:19.740763Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:19.759488Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:19.817804Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.160012Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7524679967806539835:2455], TxId: 281474976715671, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jzmyjwq34h9q5wg21vvr8fgb. SessionId : ydb://session/3?node_id=3&id=NjBiNzJkZjctMzYzMDgxNTUtZDQ3YjVkMDQtNjdkMzAwOGY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : . }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-07-08T12:01:20.160151Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7524679967806539836:2456], TxId: 281474976715671, task: 2. Ctx: { TraceId : 01jzmyjwq34h9q5wg21vvr8fgb. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=NjBiNzJkZjctMzYzMDgxNTUtZDQ3YjVkMDQtNjdkMzAwOGY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : . }. Handle abort execution event from: [3:7524679967806539832:2446], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-07-08T12:01:20.160205Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NjBiNzJkZjctMzYzMDgxNTUtZDQ3YjVkMDQtNjdkMzAwOGY=, ActorId: [3:7524679967806539803:2446], ActorState: ExecuteState, TraceId: 01jzmyjwq34h9q5wg21vvr8fgb, Create QueryResponse for error on request, msg: 2025-07-08T12:01:20.209148Z node 3 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |69.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 >> KqpImmediateEffects::Replace [GOOD] >> KqpImmediateEffects::ReplaceDuplicates >> KqpInplaceUpdate::Negative_SingleRowListFromRange+UseSink |69.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |69.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... results_accumulator.log} |69.3%| [TA] {RESULT} $(B)/ydb/tests/functional/benchmarks_init/test-results/py3test/{meta.json ... results_accumulator.log} |69.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |69.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... results_accumulator.log} >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClass [GOOD] >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI >> KqpImmediateEffects::ForceImmediateEffectsExecution-UseSink >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast-UseSink [GOOD] >> KqpInplaceUpdate::Negative_SingleRowWithValueCast+UseSink >> KqpImmediateEffects::ConflictingKeyR1WRR2 >> KqpImmediateEffects::TxWithReadAtTheEnd-UseSink >> KqpImmediateEffects::Insert >> KqpImmediateEffects::UpdateAfterUpsert [GOOD] >> KqpImmediateEffects::UpdateAfterInsert >> KqpEffects::UpdateOn_Literal [GOOD] >> KqpEffects::UpdateOn_Params >> KqpEffects::AlterAfterUpsertTransaction-UseSink [GOOD] >> KqpEffects::UpdateOn_Select [GOOD] >> KqpEffects::InsertAbort_Select_Success >> KqpEffects::AlterAfterUpsertBeforeUpsertTransaction-UseSink >> KqpImmediateEffects::AlreadyBrokenImmediateEffects >> KqpImmediateEffects::ConflictingKeyR1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyR1RWR2 >> KqpImmediateEffects::DeleteOnAfterInsertWithIndex >> KqpEffects::InsertAbort_Params_Success [GOOD] >> KqpEffects::InsertAbort_Params_Duplicates+UseSink >> KqpWrite::CastValuesOptional [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes_reboots/unittest >> TUserAttrsTestWithReboots::AllowedSymbolsReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:127:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:132:2058] recipient: 
[1:110:2142] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:137:2058] recipient: [1:111:2143] 2025-07-08T12:00:32.021540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:00:32.021563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:32.021568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:00:32.021573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:00:32.021585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:00:32.021589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:32.021597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:32.021614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:00:32.021699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:32.034320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T12:00:32.034346Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:177:2058] recipient: [1:15:2062] 2025-07-08T12:00:32.037663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:32.037707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:32.037734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:00:32.039261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:32.039403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:32.039507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:32.039557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:00:32.040007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:32.040046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:00:32.040276Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:32.040285Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:32.040303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:32.040310Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, 
schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:32.040319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:00:32.040352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-07-08T12:00:32.041514Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T12:00:32.056844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:32.056926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:32.057037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:00:32.057080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:32.057091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:32.060787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:32.060827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:00:32.060882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:32.060908Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:00:32.060914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:00:32.060920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:00:32.061489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:32.061502Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:32.061507Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:00:32.061837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:32.061847Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:32.061852Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:32.061860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:00:32.062480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:32.062829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:00:32.062872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:00:32.063072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:32.063096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:32.063103Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:32.063172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:00:32.063179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:32.063208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:32.063220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:00:32.063583Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:32.063592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:32.063639Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:32.063644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:00:32.063714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:32.063721Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:00:32.063733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:32.063737Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:32.063742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:32.063745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:32.063749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:00:32.063755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TO ... .953503Z node 127 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1006, ready parts: 1/1, is published: false 2025-07-08T12:01:20.953508Z node 127 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1006 ready parts: 1/1 2025-07-08T12:01:20.953514Z node 127 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1006:0 2025-07-08T12:01:20.953518Z node 127 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1006:0 2025-07-08T12:01:20.953528Z node 127 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-07-08T12:01:20.953534Z node 127 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1006, publications: 2, subscribers: 0 2025-07-08T12:01:20.953541Z node 127 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 5], 5 2025-07-08T12:01:20.953544Z node 127 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1006, [OwnerId: 72057594046678944, LocalPathId: 6], 3 2025-07-08T12:01:20.953666Z node 127 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1006 2025-07-08T12:01:20.953676Z node 127 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 1006 2025-07-08T12:01:20.953681Z node 127 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1006 2025-07-08T12:01:20.953685Z node 127 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2025-07-08T12:01:20.953689Z node 127 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-07-08T12:01:20.953779Z node 127 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 1006 2025-07-08T12:01:20.953788Z node 127 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 1006 2025-07-08T12:01:20.953792Z node 127 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1006 2025-07-08T12:01:20.953795Z node 127 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1006, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2025-07-08T12:01:20.953799Z node 127 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 
6] was 1 2025-07-08T12:01:20.953808Z node 127 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1006, subscribers: 0 2025-07-08T12:01:20.959128Z node 127 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 2025-07-08T12:01:20.959206Z node 127 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1006 TestModificationResult got TxId: 1006, wait until txId: 1006 TestModificationResults wait txId: 1007 2025-07-08T12:01:20.959938Z node 127 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/Dir@" OperationType: ESchemeOpMkDir MkDir { Name: "Dir@" } } TxId: 1007 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:01:20.960596Z node 127 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1007, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/Dir@/Dir@\', error: symbol \'@\' is not allowed in the path part \'Dir@\', source_location: ydb/core/tx/schemeshard/schemeshard__operation.cpp:941" TxId: 1007 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:20.960633Z node 127 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1007, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/Dir@/Dir@', error: symbol '@' is not allowed in the path part 'Dir@', source_location: ydb/core/tx/schemeshard/schemeshard__operation.cpp:941, operation: CREATE DIRECTORY, path: /MyRoot/Dir@/Dir@ TestModificationResult got TxId: 1007, wait until txId: 1007 TestWaitNotification wait txId: 1006 2025-07-08T12:01:20.960695Z node 127 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: send EvNotifyTxCompletion 2025-07-08T12:01:20.960702Z node 127 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1006 2025-07-08T12:01:20.960785Z node 127 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1006, at schemeshard: 72057594046678944 2025-07-08T12:01:20.960803Z node 127 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: got EvNotifyTxCompletionResult 2025-07-08T12:01:20.960808Z node 127 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1006: satisfy waiter [127:561:2517] TestWaitNotification: OK eventTxId 1006 2025-07-08T12:01:20.960879Z node 127 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:01:20.960910Z node 127 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 37us result status StatusSuccess 2025-07-08T12:01:20.961028Z node 127 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 10 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } 
Children { Name: "Dir!" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1004 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Dir0:" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Dir@" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1005 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1000 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:20.961092Z node 127 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir@" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:01:20.961105Z node 127 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dir@" took 15us result status StatusSuccess 2025-07-08T12:01:20.961142Z node 127 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir@" PathDescription { Self { Name: "Dir@" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1005 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "Dir!" 
PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1006 CreateStep: 5000006 ParentPathId: 5 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:20.961185Z node 127 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir!" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:01:20.961194Z node 127 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dir!" took 11us result status StatusSuccess 2025-07-08T12:01:20.961219Z node 127 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir!" PathDescription { Self { Name: "Dir!" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1004 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpImmediateEffects::ReplaceExistingKey [GOOD] >> KqpImmediateEffects::TxWithReadAtTheEnd+UseSink >> KqpInplaceUpdate::SingleRowSimple-UseSink [GOOD] >> KqpInplaceUpdate::SingleRowStr+UseSink >> KqpInplaceUpdate::Negative_BatchUpdate+UseSink [GOOD] >> KqpInplaceUpdate::BigRow >> TOlapReboots::DropMultipleTables [GOOD] >> KqpImmediateEffects::ReplaceDuplicates [GOOD] >> KqpImmediateEffects::ConflictingKeyW1WR2 >> KqpInplaceUpdate::Negative_SingleRowListFromRange+UseSink [GOOD] >> KqpInplaceUpdate::Negative_BatchUpdate-UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpWrite::CastValuesOptional [GOOD] Test command err: Trying to start YDB, gRPC: 61321, MsgBus: 62918 2025-07-08T12:01:19.826299Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679962719324779:2244];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/43nv/001bc1/r3tmp/tmpiPC1jN/pdisk_1.dat 2025-07-08T12:01:19.850664Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:01:19.883096Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61321, node 1 2025-07-08T12:01:19.908184Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:19.908195Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:19.908197Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:19.908240Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62918 2025-07-08T12:01:19.950763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:19.950790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:19.957402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62918 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:20.013788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:20.024125Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:20.078270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:20.139642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-07-08T12:01:20.178185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.195868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:20.355170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.427678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.441576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.461009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.480676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.538836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.558257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.724369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.788521Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7524679967014294460:2465], TxId: 281474976715672, task: 1. Ctx: { TraceId : 01jzmyjxazev8v078ejnjch0t3. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MWI2OGJkNmYtMjFiY2QzYWMtOTIwMmM0OTItYjA2NDc3Zg==. CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-07-08T12:01:20.788726Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7524679967014294461:2466], TxId: 281474976715672, task: 2. Ctx: { TraceId : 01jzmyjxazev8v078ejnjch0t3. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MWI2OGJkNmYtMjFiY2QzYWMtOTIwMmM0OTItYjA2NDc3Zg==. CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. Handle abort execution event from: [1:7524679967014294457:2446], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-07-08T12:01:20.788839Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWI2OGJkNmYtMjFiY2QzYWMtOTIwMmM0OTItYjA2NDc3Zg==, ActorId: [1:7524679967014294338:2446], ActorState: ExecuteState, TraceId: 01jzmyjxazev8v078ejnjch0t3, Create QueryResponse for error on request, msg:
: Error: Conflict with existing key., code: 2012 2025-07-08T12:01:20.825034Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:01:20.835859Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7524679967014294502:2477], TxId: 281474976715675, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=MWI2OGJkNmYtMjFiY2QzYWMtOTIwMmM0OTItYjA2NDc3Zg==. TraceId : 01jzmyjxctcbj36rsh0tapxn9f. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-07-08T12:01:20.835963Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7524679967014294503:2478], TxId: 281474976715675, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MWI2OGJkNmYtMjFiY2QzYWMtOTIwMmM0OTItYjA2NDc3Zg==. TraceId : 01jzmyjxctcbj36rsh0tapxn9f. CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. Handle abort execution event from: [1:7524679967014294499:2446], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-07-08T12:01:20.836174Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWI2OGJkNmYtMjFiY2QzYWMtOTIwMmM0OTItYjA2NDc3Zg==, ActorId: [1:7524679967014294338:2446], ActorState: ExecuteState, TraceId: 01jzmyjxctcbj36rsh0tapxn9f, Create QueryResponse for error on request, msg:
: Error: Duplicated keys found., code: 2012 Trying to start YDB, gRPC: 24746, MsgBus: 11215 2025-07-08T12:01:21.152520Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679973820536672:2071];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:21.153360Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001bc1/r3tmp/tmpQn4JH9/pdisk_1.dat 2025-07-08T12:01:21.177092Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24746, node 2 2025-07-08T12:01:21.193273Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:21.193285Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:21.193287Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:21.193323Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11215 2025-07-08T12:01:21.261282Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:21.261308Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:21.263234Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11215 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T12:01:21.281616Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.283772Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:21.293500Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.322116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-07-08T12:01:21.346511Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.365436Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.551950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.561297Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.579196Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.595339Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.606726Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.624367Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.641376Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 >> KqpInplaceUpdate::Negative_SingleRowWithValueCast+UseSink [GOOD] >> KqpImmediateEffects::ForceImmediateEffectsExecution-UseSink [GOOD] >> KqpImmediateEffects::ImmediateUpdate >> KqpImmediateEffects::ConflictingKeyRW1WR2 >> KqpImmediateEffects::ConflictingKeyR1WRR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1RR2 >> KqpImmediateEffects::TxWithReadAtTheEnd-UseSink [GOOD] >> KqpImmediateEffects::TxWithWriteAtTheEnd+UseSink >> KqpImmediateEffects::AlreadyBrokenImmediateEffects [GOOD] >> KqpImmediateEffects::UpdateAfterInsert [GOOD] >> KqpEffects::UpdateOn_Params [GOOD] >> KqpEffects::InsertAbort_Select_Success [GOOD] >> KqpEffects::InsertAbort_Select_Duplicates-UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ReplaceDuplicates [GOOD] Test command err: Trying to start YDB, gRPC: 14437, MsgBus: 17096 2025-07-08T12:01:20.115396Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679967224141194:2071];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:20.115420Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001bac/r3tmp/tmpM6Gt07/pdisk_1.dat 2025-07-08T12:01:20.187742Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on 
GrpcPort 14437, node 1 2025-07-08T12:01:20.204072Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:20.204085Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:20.204087Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:20.204145Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:01:20.215581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:20.215612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:20.216692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17096 TClient is connected to server localhost:17096 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T12:01:20.256476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.260478Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-07-08T12:01:20.344168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:20.412598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:20.474507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:20.484693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:20.549811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.563439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.577308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.590830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.604814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.618477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.632734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.776893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 12425, MsgBus: 26627 2025-07-08T12:01:21.226971Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679975267151698:2153];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001bac/r3tmp/tmpl9OglS/pdisk_1.dat 2025-07-08T12:01:21.236220Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:01:21.257752Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12425, node 2 2025-07-08T12:01:21.264754Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:21.264766Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:21.264769Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:21.264812Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26627 TClient is connected to server localhost:26627 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:21.334763Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:21.334792Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:21.335103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.335926Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:21.346077Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.358370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.385422Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.400600Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:21.665728Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.674700Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.682413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.697106Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.712304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.728562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.739132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.875015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 >> KqpImmediateEffects::Insert [GOOD] >> KqpImmediateEffects::ImmediateUpdateSelect ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowWithValueCast+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 7336, MsgBus: 25561 2025-07-08T12:01:20.708776Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679971105510409:2070];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:20.708963Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b9a/r3tmp/tmpcEf2U5/pdisk_1.dat 2025-07-08T12:01:20.781919Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7336, node 1 2025-07-08T12:01:20.798333Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:20.798350Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:20.798352Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:20.798388Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:01:20.812164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:20.812192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:20.812898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected TClient is connected to server localhost:25561 TClient is connected to server localhost:25561 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:20.872083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:20.874994Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:20.886016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:20.961243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.992147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.006997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:21.134840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.142910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.151733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.165034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.172451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.189376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.205129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.364615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14986, MsgBus: 3498 2025-07-08T12:01:21.608030Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679972598692912:2235];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:21.608914Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b9a/r3tmp/tmpbdrpdq/pdisk_1.dat 2025-07-08T12:01:21.625268Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14986, node 2 2025-07-08T12:01:21.634529Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:21.634543Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:21.634545Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:21.634596Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3498 TClient is connected to server localhost:3498 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:21.713385Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:21.713414Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:21.713741Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.715365Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:21.721071Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:21.728502Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.746143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.767378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.779281Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:21.962684Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.017886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.026027Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.043087Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.057474Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.073051Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.086582Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.223519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 >> KqpInplaceUpdate::SingleRowStr+UseSink [GOOD] >> KqpEffects::InsertAbort_Params_Duplicates+UseSink [GOOD] >> KqpEffects::InsertAbort_Params_Duplicates-UseSink >> KqpEffects::AlterAfterUpsertBeforeUpsertTransaction-UseSink [GOOD] >> KqpImmediateEffects::TxWithReadAtTheEnd+UseSink [GOOD] >> KqpImmediateEffects::ConflictingKeyR1RWR2 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpdateAfterInsert [GOOD] Test command err: Trying to start YDB, gRPC: 16385, MsgBus: 17268 2025-07-08T12:01:20.628709Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679970389467417:2067];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:20.628744Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b94/r3tmp/tmppDIoKt/pdisk_1.dat 2025-07-08T12:01:20.686708Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16385, node 1 2025-07-08T12:01:20.706360Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:20.706376Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:20.706378Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:20.706418Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17268 2025-07-08T12:01:20.730634Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:20.730659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:20.731765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17268 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:20.775436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:20.777426Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:20.795783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:20.883558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:20.925932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:20.993911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:21.109346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.128898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.144359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.206317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.222708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.234888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.249363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.419558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 61835, MsgBus: 64109 2025-07-08T12:01:21.817165Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679974881321522:2152];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b94/r3tmp/tmp6GMhud/pdisk_1.dat 2025-07-08T12:01:21.829550Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 61835, node 2 2025-07-08T12:01:21.865955Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:21.869107Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:21.869116Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:21.869118Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:21.869163Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64109 TClient is connected to server localhost:64109 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-07-08T12:01:21.924192Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:21.924217Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:21.926486Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.927187Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:21.929539Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:21.935181Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.961525Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.992412Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.003963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:22.187232Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.194830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.208201Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.222660Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.236181Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.250774Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.264824Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.512759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::AlreadyBrokenImmediateEffects [GOOD] Test command err: Trying to start YDB, gRPC: 7710, MsgBus: 13837 2025-07-08T12:01:20.891697Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679970917732336:2070];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:20.891735Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b91/r3tmp/tmpIQkirw/pdisk_1.dat 2025-07-08T12:01:20.956134Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7710, node 1 2025-07-08T12:01:20.999091Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:20.999102Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:20.999104Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:20.999145Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13837 2025-07-08T12:01:21.030581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:21.030602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:21.032439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is 
connected to server localhost:13837 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:21.080816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.091806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.159528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.181199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.205015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:21.277255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.288064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.299928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.312421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.326563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.344664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.356251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 12400, MsgBus: 24707 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b91/r3tmp/tmpy3YZc8/pdisk_1.dat 2025-07-08T12:01:21.874416Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679974632171624:2078];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:21.874594Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:01:21.887735Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12400, node 2 2025-07-08T12:01:21.898813Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:21.898826Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:21.898828Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:21.898871Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24707 TClient is connected to server localhost:24707 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:21.972966Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:21.972996Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:21.973290Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.973771Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:21.974780Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:21.980873Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:21.991459Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.009617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:22.028539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.183250Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.191014Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.201310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.214797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.231454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.243028Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.256869Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.514475Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.795396Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzkzNTg2Y2UtZGY1OGVmMmUtNmE4MzlhMDctMWJjNzQyYTI=, ActorId: [2:7524679978927141620:2481], ActorState: ExecuteState, TraceId: 01jzmyjzb07nstgpwfvb49zssh, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2025-07-08T12:01:22.797545Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzkzNTg2Y2UtZGY1OGVmMmUtNmE4MzlhMDctMWJjNzQyYTI=, ActorId: [2:7524679978927141620:2481], ActorState: ReadyState, TraceId: 01jzmyjzbdavaz5rc9fxkrhxy2, Create QueryResponse for error on request, msg: 2025-07-08T12:01:22.873057Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KqpInplaceUpdate::BigRow [GOOD] >> KqpImmediateEffects::DeleteOnAfterInsertWithIndex [GOOD] >> KqpImmediateEffects::ForceImmediateEffectsExecution+UseSink >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpEffects::UpdateOn_Params [GOOD] Test command err: Trying to start YDB, gRPC: 18333, MsgBus: 25861 2025-07-08T12:01:21.019892Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679972894101755:2224];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:21.022308Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b8e/r3tmp/tmprHzMDs/pdisk_1.dat 2025-07-08T12:01:21.095240Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18333, node 1 2025-07-08T12:01:21.122460Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:21.122474Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:21.122476Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:21.122516Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25861 2025-07-08T12:01:21.168531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:21.168552Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:25861 2025-07-08T12:01:21.169649Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:21.182084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.192101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.255359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.278196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.289291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:21.368367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.376168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.388361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.403560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.420460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.433060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.452337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 23506, MsgBus: 29458 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b8e/r3tmp/tmpvSehCV/pdisk_1.dat 2025-07-08T12:01:21.934285Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679973747401958:2085];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:21.935800Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 23506, node 2 2025-07-08T12:01:21.951453Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:21.959775Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:21.959785Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:21.959786Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:21.959828Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29458 TClient is connected to server localhost:29458 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T12:01:22.041278Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:22.041303Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:22.041664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.042419Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:22.044906Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:22.052152Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.068168Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.095638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.105617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:22.260450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.281675Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.291462Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.308299Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.330630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.389981Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.407169Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::DropMultipleTables [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:127:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:132:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:137:2058] recipient: [1:111:2143] 2025-07-08T12:00:36.500725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:00:36.500745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:36.500750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:00:36.500754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:00:36.500758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type 
TxMergeTablePartition, limit 10000 2025-07-08T12:00:36.500761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:36.500769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:36.500786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:00:36.500871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:36.512439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T12:00:36.512462Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:177:2058] recipient: [1:15:2062] 2025-07-08T12:00:36.515295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:36.515328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:36.515358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:00:36.516800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:36.516942Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:36.517045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:36.517093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:00:36.517507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:36.517545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:00:36.517728Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:36.517737Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:36.517753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:36.517759Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:36.517764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:00:36.517793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-07-08T12:00:36.518819Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T12:00:36.537586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: 
ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:36.537636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:36.537681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:00:36.537716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:36.537726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:36.538548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:36.538571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:00:36.538605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:36.538613Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:00:36.538617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:00:36.538622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:00:36.538972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:36.538983Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:36.538987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:00:36.545154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:36.545169Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:36.545175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:36.545181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:00:36.545767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:36.546172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:00:36.546218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 
72057594046316545 is [1:133:2156] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:00:36.546388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:36.546410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:36.546417Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:36.546486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:00:36.546493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:36.546518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:36.546532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:00:36.546901Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:36.546908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:36.546946Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:36.546950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:00:36.547010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:36.547016Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:00:36.547027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:36.547031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:36.547035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:36.547038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:36.547042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:00:36.547046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TO ... 
-08T12:01:22.197791Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T12:01:22.197822Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-07-08T12:01:22.197854Z node 83 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:22.197859Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [83:207:2209], at schemeshard: 72057594046678944, txId: 1005, path id: 1 2025-07-08T12:01:22.197865Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [83:207:2209], at schemeshard: 72057594046678944, txId: 1005, path id: 3 2025-07-08T12:01:22.197869Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [83:207:2209], at schemeshard: 72057594046678944, txId: 1005, path id: 5 2025-07-08T12:01:22.198023Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2025-07-08T12:01:22.198034Z node 83 :FLAT_TX_SCHEMESHARD INFO: TDropColumnTable TProposedWaitParts operationId# 1005:0 ProgressState at schemeshard: 72057594046678944 2025-07-08T12:01:22.198043Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TDropColumnTable TProposedWaitParts operationId# 1005:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-07-08T12:01:22.198176Z node 83 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-07-08T12:01:22.198191Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-07-08T12:01:22.198199Z node 83 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2025-07-08T12:01:22.198205Z node 83 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-07-08T12:01:22.198210Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-07-08T12:01:22.198363Z node 83 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1005 2025-07-08T12:01:22.198376Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1005 2025-07-08T12:01:22.198379Z node 83 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2025-07-08T12:01:22.198384Z node 83 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-07-08T12:01:22.198388Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 3 2025-07-08T12:01:22.200554Z node 83 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 11 PathOwnerId: 72057594046678944, cookie: 1005 2025-07-08T12:01:22.200580Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 11 PathOwnerId: 72057594046678944, cookie: 1005 2025-07-08T12:01:22.200585Z node 83 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2025-07-08T12:01:22.200590Z node 83 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 11 2025-07-08T12:01:22.200596Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-07-08T12:01:22.200617Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2025-07-08T12:01:22.200926Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1005:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-07-08T12:01:22.201421Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-07-08T12:01:22.201513Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-07-08T12:01:22.203188Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-07-08T12:01:22.216491Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1005 2025-07-08T12:01:22.216518Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1005, tablet: 72075186233409546, partId: 0 2025-07-08T12:01:22.216545Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1005:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1005 2025-07-08T12:01:22.216561Z node 83 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1005:0 129 -> 130 FAKE_COORDINATOR: Erasing txId 1005 2025-07-08T12:01:22.217303Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1005:0, at schemeshard: 72057594046678944 2025-07-08T12:01:22.217355Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2025-07-08T12:01:22.217363Z node 83 :FLAT_TX_SCHEMESHARD INFO: TDropColumnTable TProposedDeleteParts operationId# 1005:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:01:22.217388Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-07-08T12:01:22.217405Z node 83 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2025-07-08T12:01:22.217409Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-07-08T12:01:22.217414Z node 83 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1005:0 progress is 1/1 2025-07-08T12:01:22.217417Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 
ready parts: 1/1 2025-07-08T12:01:22.217422Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: true 2025-07-08T12:01:22.217438Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [83:361:2339] message: TxId: 1005 2025-07-08T12:01:22.217444Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-07-08T12:01:22.217448Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1005:0 2025-07-08T12:01:22.217453Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1005:0 2025-07-08T12:01:22.217479Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-07-08T12:01:22.217582Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T12:01:22.217589Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-07-08T12:01:22.217601Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-07-08T12:01:22.218445Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2025-07-08T12:01:22.218459Z node 83 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [83:526:2496] 2025-07-08T12:01:22.218539Z node 83 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1005 2025-07-08T12:01:22.218643Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:01:22.218692Z node 83 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable1" took 61us result status StatusPathDoesNotExist 2025-07-08T12:01:22.218736Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/OlapStore/ColumnTable1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/OlapStore\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/OlapStore/ColumnTable1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/OlapStore" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "OlapStore" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-07-08T12:01:22.218811Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: 
false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:01:22.218826Z node 83 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable2" took 17us result status StatusPathDoesNotExist 2025-07-08T12:01:22.218840Z node 83 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/OlapStore/ColumnTable2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/OlapStore\' (id: [OwnerId: 72057594046678944, LocalPathId: 3]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/OlapStore/ColumnTable2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/OlapStore" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "OlapStore" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpImmediateEffects::ConflictingKeyW1RR2 >> KikimrIcGateway::TestLoadDataSourceProperties [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpEffects::AlterAfterUpsertBeforeUpsertTransaction-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 13577, MsgBus: 24858 2025-07-08T12:01:20.038368Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679970072772634:2140];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:20.038914Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001bf0/r3tmp/tmpTID3qr/pdisk_1.dat 2025-07-08T12:01:20.106258Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13577, node 1 2025-07-08T12:01:20.136907Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:20.136918Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:20.136920Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:20.137130Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:01:20.175247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:20.175281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:24858 2025-07-08T12:01:20.179532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24858 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:20.235081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:20.238035Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-07-08T12:01:20.246746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:20.318759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.342296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:20.368582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:20.474869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.485935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.513320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.529077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.545025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.556991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.580742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.727080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.763887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.771374Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=5; 2025-07-08T12:01:20.772886Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 5 at tablet 72075186224037922 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-07-08T12:01:20.772925Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037922 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-07-08T12:01:20.772993Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7524679970072775130:2455], Table: `/Root/TestTable` ([72057594046644480:13:1]), SessionActorId: [1:7524679970072775020:2455]Got LOCKS BROKEN for table `/Root/TestTable`. ShardID=72075186224037922, Sink=[1:7524679970072775130:2455].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-07-08T12:01:20.773122Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7524679970072775123:2455], SessionActorId: [1:7524679970072775020:2455], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TestTable`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7524679970072775020:2455]. isRollback=0 2025-07-08T12:01:20.773177Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTFkYTNkOWMtYWExNWRiNTQtYzBmOTY2OGYtM2VjNmJkN2E=, ActorId: [1:7524679970072775020:2455], ActorState: ExecuteState, TraceId: 01jzmyjxc22wbf1c5y7retzgdf, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7524679970072775186:2455] from: [1:7524679970072775123:2455] 2025-07-08T12:01:20.773280Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7524679970072775186:2455] TxId: 281474976710674. Ctx: { TraceId: 01jzmyjxc22wbf1c5y7retzgdf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTFkYTNkOWMtYWExNWRiNTQtYzBmOTY2OGYtM2VjNmJkN2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TestTable`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-07-08T12:01:20.773324Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTFkYTNkOWMtYWExNWRiNTQtYzBmOTY2OGYtM2VjNmJkN2E=, ActorId: [1:7524679970072775020:2455], ActorState: ExecuteState, TraceId: 01jzmyjxc22wbf1c5y7retzgdf, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 14045, MsgBus: 15187 2025-07-08T12:01:20.925552Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679967923121844:2071];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:20.926423Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001bf0/r3tmp/tmp38PlTl/pdisk_1.dat 2025-07-08T12:01:20.942054Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14045, node 2 2025-07-08T12:01:20.953814Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:20.953825Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:20.953827Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:20.953889Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15187 TClient is connected to server localhost:15187 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:21.028993Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:21.029021Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:21.029365Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.030352Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:21.031441Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:21.049586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.073190Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.124116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.140660Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.305837Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.316014Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.329098Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.340335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.353780Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.369101Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.389817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.553108Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.604298Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.619431Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDZkMzk1Zi1lNTFiYTQ0Ny00Y2RhMTNmNC03NzkzNjU0NA==, ActorId: [2:7524679972218091561:2446], ActorState: ExecuteState, TraceId: 01jzmyjy6jcyc3qq7fbpa4f1pq, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 29768, MsgBus: 6795 2025-07-08T12:01:21.931837Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7524679975037483946:2226];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:21.931922Z node 3 
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001bf0/r3tmp/tmpeJYhzX/pdisk_1.dat 2025-07-08T12:01:21.966840Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29768, node 3 2025-07-08T12:01:21.996727Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:21.996739Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:21.996741Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:21.996775Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6795 TClient is connected to server localhost:6795 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-07-08T12:01:22.045260Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:22.045285Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:22.046119Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.047644Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:22.049326Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:22.057425Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.119013Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.143740Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:22.159195Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.341700Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.352172Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.370869Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.399913Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.416500Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.438652Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.456664Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.714904Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.836865Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.863650Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=Yzk1ZjkwODgtNTRiZGM1MDgtYTQ2ZjEwNzMtY2Q1YjI0MjM=, ActorId: [3:7524679979332453556:2446], ActorState: ExecuteState, TraceId: 01jzmyjzdc3j21693rvpz31drk, Create QueryResponse for error on request, msg: 2025-07-08T12:01:22.927932Z node 3 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowStr+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 12195, MsgBus: 10183 2025-07-08T12:01:20.928582Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679970849211271:2144];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:20.967612Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001ba0/r3tmp/tmpSzGtJA/pdisk_1.dat 2025-07-08T12:01:21.034718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2025-07-08T12:01:21.034740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:21.036640Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:21.038248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12195, node 1 2025-07-08T12:01:21.061201Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:21.061219Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:21.061221Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:21.061269Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10183 TClient is connected to server localhost:10183 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:21.118025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.121468Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:21.136199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.205793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.241382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.276076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:21.355621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.412811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.425588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.438582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.459872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.516776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.531040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.709668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2326, MsgBus: 20677 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001ba0/r3tmp/tmpikI9x7/pdisk_1.dat 2025-07-08T12:01:22.178698Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:01:22.183664Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2326, node 2 2025-07-08T12:01:22.197488Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:22.197498Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:22.197500Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:22.197541Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20677 TClient is connected to server localhost:20677 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:22.273222Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:22.273250Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:22.273586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.274102Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:22.283395Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:22.292335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.306920Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.338005Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.358143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:22.551263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.569501Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.633441Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.657436Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.721185Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.733883Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.747258Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.927036Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 >> KqpImmediateEffects::ConflictingKeyRW1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1RWR2 >> KqpImmediateEffects::ImmediateUpdate [GOOD] >> KqpInplaceUpdate::Negative_BatchUpdate-UseSink [GOOD] >> KqpImmediateEffects::ConflictingKeyW1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyW1RWR2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::TxWithReadAtTheEnd+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 2418, MsgBus: 19514 2025-07-08T12:01:20.979179Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679967421241093:2233];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:20.979241Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b8c/r3tmp/tmppVyCDh/pdisk_1.dat 2025-07-08T12:01:21.029678Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2418, node 1 2025-07-08T12:01:21.061096Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:21.061111Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:21.061114Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:21.061160Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19514 2025-07-08T12:01:21.081157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2025-07-08T12:01:21.081179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:21.081934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19514 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:21.125709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.129936Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-07-08T12:01:21.145757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.212354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.237101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.263117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:21.336462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.352383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.369291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.382332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.396451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.409429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.425587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.583861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 12512, MsgBus: 23254 2025-07-08T12:01:22.281353Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b8c/r3tmp/tmpfCQm5f/pdisk_1.dat 2025-07-08T12:01:22.295865Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12512, node 2 2025-07-08T12:01:22.313499Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:22.313513Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:22.313515Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:22.313558Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23254 2025-07-08T12:01:22.361929Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:22.361956Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:22.362564Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23254 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:22.441725Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.445354Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:22.460907Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:22.494181Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.547857Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.583756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:22.701055Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.716443Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.728772Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.740043Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.761418Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.775252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.797186Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.002665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyR1RWR2 [GOOD] Test command err: Trying to start YDB, gRPC: 31564, MsgBus: 30598 2025-07-08T12:01:20.838139Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679967595731965:2252];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:20.838182Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b97/r3tmp/tmpc7ATEb/pdisk_1.dat 2025-07-08T12:01:20.867321Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31564, node 1 2025-07-08T12:01:20.888224Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:20.888235Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:20.888237Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:20.888269Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:01:20.898219Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:20.898252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:20.899203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30598 TClient is connected to 
server localhost:30598 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:20.973076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:20.975925Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:20.984100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:21.055624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.114578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.180880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:21.286851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.307240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.319382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.335150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.347510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.369730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.390414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.567701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10395, MsgBus: 16747 2025-07-08T12:01:21.970818Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679971918775602:2084];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:21.972316Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b97/r3tmp/tmpDSG8V8/pdisk_1.dat 2025-07-08T12:01:22.004905Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10395, node 2 2025-07-08T12:01:22.020082Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:22.020092Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:22.020095Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:22.020141Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16747 2025-07-08T12:01:22.079856Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:22.079886Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:22.080977Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16747 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:22.117437Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.118960Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:22.150908Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.210310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.233685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-07-08T12:01:22.293947Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.526451Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.539358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.556190Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.570323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.585705Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.601741Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.663105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.975570Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:01:23.014943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadDataSourceProperties [GOOD] Test command err: Trying to start YDB, gRPC: 19062, MsgBus: 18370 2025-07-08T12:01:16.257996Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679953387017627:2063];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:16.258018Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000e5e/r3tmp/tmp85UfVn/pdisk_1.dat 2025-07-08T12:01:16.343558Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19062, node 1 2025-07-08T12:01:16.362780Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:16.362791Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:16.362792Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:16.362820Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18370 2025-07-08T12:01:16.400985Z 
node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:16.401011Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:16.402137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18370 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:16.449021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:16.452073Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:16.462719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:16.541946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:16.573841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:16.637401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:16.712942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.727009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.741668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.757110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.783370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.795787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:16.813605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.260026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:2, at schemeshard: 72057594046644480 2025-07-08T12:01:17.260261Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:01:17.346592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.423708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:1, at schemeshard: 72057594046644480 2025-07-08T12:01:17.535101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.632464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.691112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-07-08T12:01:17.758477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-07-08T12:01:17.777627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 
2025-07-08T12:01:18.058069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715703:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 17527, MsgBus: 61244 2025-07-08T12:01:18.449549Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679961810576580:2112];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000e5e/r3tmp/tmpPwi69O/pdisk_1.dat 2025-07-08T12:01:18.453564Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:01:18.470515Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17527, node 2 2025-07-08T12:01:18.481519Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:18.481536Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:18.481537Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:18.481586Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61244 TClient is connected to server localhost:61244 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:18.552719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:18.558659Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:18.558693Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:18.561335Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:18.565706Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:18.602221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:18.633692Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:18.650032Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:18.864353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.881920Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.889698Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.917544Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.936749Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:18.999216Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:19.022809Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:19.449820Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:01:19.455827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:2, at schemeshard: 72057594046644480 2025-07-08T12:01:19.547606Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-07-08T12:01:19.616118Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:1, at schemeshard: 72057594046644480 2025-07-08T12:01:19.719423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-07-08T12:01:19.836370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-07-08T12:01:19.922355Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715685:0, at schemeshard: 72057594046644480 
2025-07-08T12:01:19.998913Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-07-08T12:01:20.018057Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.554653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715715:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 29261, MsgBus: 62528 2025-07-08T12:01:20.877895Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7524679969494281726:2065];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:20.877909Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000e5e/r3tmp/tmpF3yaRY/pdisk_1.dat 2025-07-08T12:01:20.900569Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29261, node 3 2025-07-08T12:01:20.921920Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:20.921931Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:20.921933Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:20.921973Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62528 TClient is connected to server localhost:62528 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:20.982503Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:20.982530Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:20.982933Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:20.983905Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-07-08T12:01:20.985457Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:20.989021Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.008226Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.041062Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.053799Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.190540Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.209070Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.221596Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.243708Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.265483Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.326179Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.340894Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.881253Z node 3 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:01:21.883489Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:2, at schemeshard: 72057594046644480 2025-07-08T12:01:21.999856Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.073177Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:1, at schemeshard: 72057594046644480 2025-07-08T12:01:22.182013Z node 3 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.243719Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.326292Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.408855Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-07-08T12:01:22.435761Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.106741Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715715:0, at schemeshard: 72057594046644480 >> KqpImmediateEffects::ConflictingKeyRW1RR2 [GOOD] >> KqpImmediateEffects::TxWithWriteAtTheEnd+UseSink [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::BigRow [GOOD] Test command err: Trying to start YDB, gRPC: 8521, MsgBus: 28297 2025-07-08T12:01:21.200252Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679972774765320:2222];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:21.200327Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b89/r3tmp/tmpdwcZgP/pdisk_1.dat 2025-07-08T12:01:21.292327Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8521, node 1 2025-07-08T12:01:21.325155Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:21.325170Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:21.325173Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:21.325219Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28297 TClient is connected to server localhost:28297 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-07-08T12:01:21.357284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:21.357310Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:21.358665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T12:01:21.369355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.381864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.454225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.480272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.493459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:21.618805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.630293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.645664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.659397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.673680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.683652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.697122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.840823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 61460, MsgBus: 12306 2025-07-08T12:01:22.335533Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679979406676563:2072];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:22.335550Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b89/r3tmp/tmp1K39Rq/pdisk_1.dat 2025-07-08T12:01:22.351910Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61460, node 2 2025-07-08T12:01:22.364935Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:22.364967Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:22.364970Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:22.365018Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12306 TClient is connected to server localhost:12306 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:22.437717Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:22.437749Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:22.438443Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.439391Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:22.446634Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.495061Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.520394Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.544294Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-07-08T12:01:22.567000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.770010Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.782845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.796360Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.814224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.828112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.839767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.856401Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.065312Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 >> KqpImmediateEffects::ImmediateUpdateSelect [GOOD] >> KqpEffects::InsertAbort_Params_Duplicates-UseSink [GOOD] >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd+UseSink >> KqpEffects::InsertAbort_Select_Duplicates-UseSink [GOOD] >> KqpEffects::InsertRevert_Literal_Conflict ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 [GOOD] Test command err: 2025-07-08T12:01:06.908332Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679911041905696:2074];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:06.908351Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:01:06.914905Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679909271559016:2234];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:06.914952Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0016b5/r3tmp/tmpYpyUSr/pdisk_1.dat 2025-07-08T12:01:06.950999Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 
2025-07-08T12:01:06.949261Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T12:01:07.002151Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:07.008012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:07.008034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:07.013582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24056, node 1 2025-07-08T12:01:07.081392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:07.081418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:07.093343Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-07-08T12:01:07.098666Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:07.129345Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/43nv/0016b5/r3tmp/yandexWgTSI5.tmp 2025-07-08T12:01:07.129359Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/43nv/0016b5/r3tmp/yandexWgTSI5.tmp 2025-07-08T12:01:07.129409Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/43nv/0016b5/r3tmp/yandexWgTSI5.tmp 2025-07-08T12:01:07.129451Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:01:07.133151Z INFO: TTestServer started on Port 30875 GrpcPort 24056 TClient is connected to server localhost:30875 PQClient connected to localhost:24056 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:07.178559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:07.188000Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976720657, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:07.196462Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976720658, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:07.201169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:07.536658Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7524679915336873956:2293], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T12:01:07.537566Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGFhODQ4MTEtMzNhNDA5MzYtZGRlN2RhYmItOWEyMWU3MWE=, ActorId: [1:7524679915336873953:2291], ActorState: ExecuteState, TraceId: 01jzmyjgda5z9fm852g63mtqe2, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T12:01:07.538234Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T12:01:07.542988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:07.579960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:07.658616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-07-08T12:01:07.744226Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720665. Ctx: { TraceId: 01jzmyjgm870d0pmn40w7gf6xn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODA3YzcxZWQtZDA2N2Y1NzUtNDRlMmYxYzYtYjY5N2E4OTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7524679915336874375:2975] 2025-07-08T12:01:07.909954Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:01:07.921018Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:01:11.908739Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7524679911041905696:2074];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:11.908790Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-07-08T12:01:11.914857Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7524679909271559016:2234];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:11.914893Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok >>>>> Prepare scheme WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-07-08T12:01:12.792599Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7524679911041905893:2128], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-07-08T12:01:12.792675Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7524679911041905893:2128], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 } 2025-07-08T12:01:12.792694Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7524679911041905893:2128], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7524679915336873683:2446] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 11 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1751976067226 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-07-08T12:01:12.792712Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7524679911041905893:2128], cacheItem# { Subscriber: { Subscriber: [1:7524679915336873683:2446] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 11 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1751976067226 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 11 IsSync: true Partial: 0 } 2025-07-08T12:01:12.792772Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7524679936811711261:3233], recipient# [1:7524679936811711260:3232], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root 
TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1751976067226 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" Pat ... :23.030028Z :INFO: [/Root] MessageGroupId [account2/topic2] SessionId [account2/topic2|2a55c87a-d4ca9711-1fa5f45c-715bee1a_0] Write session: close. Timeout = 0 ms 2025-07-08T12:01:23.030035Z :INFO: [/Root] MessageGroupId [account2/topic2] SessionId [account2/topic2|2a55c87a-d4ca9711-1fa5f45c-715bee1a_0] Write session will now close 2025-07-08T12:01:23.030040Z :DEBUG: [/Root] MessageGroupId [account2/topic2] SessionId [account2/topic2|2a55c87a-d4ca9711-1fa5f45c-715bee1a_0] Write session: aborting 2025-07-08T12:01:23.030122Z :INFO: [/Root] MessageGroupId [account2/topic2] SessionId [account2/topic2|2a55c87a-d4ca9711-1fa5f45c-715bee1a_0] Write session: gracefully shut down, all writes complete 2025-07-08T12:01:23.030127Z :DEBUG: [/Root] MessageGroupId [account2/topic2] SessionId [account2/topic2|2a55c87a-d4ca9711-1fa5f45c-715bee1a_0] Write session: destroy 2025-07-08T12:01:23.030323Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: account2/topic2|2a55c87a-d4ca9711-1fa5f45c-715bee1a_0 grpc read done: success: 0 data: 2025-07-08T12:01:23.030327Z node 3 :PQ_READ_PROXY DEBUG: session cookie 31 consumer userx session userx_3_31_6933299267918733827_v1 grpc read done: success# 0, data# { } 2025-07-08T12:01:23.030332Z node 3 :PQ_READ_PROXY INFO: session cookie 31 consumer userx session userx_3_31_6933299267918733827_v1 grpc read failed 2025-07-08T12:01:23.030337Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: account2/topic2|2a55c87a-d4ca9711-1fa5f45c-715bee1a_0 grpc read failed 2025-07-08T12:01:23.030341Z node 3 :PQ_READ_PROXY INFO: session cookie 31 consumer userx session userx_3_31_6933299267918733827_v1 grpc closed 2025-07-08T12:01:23.030346Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: account2/topic2|2a55c87a-d4ca9711-1fa5f45c-715bee1a_0 grpc closed 2025-07-08T12:01:23.030351Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: account2/topic2|2a55c87a-d4ca9711-1fa5f45c-715bee1a_0 is DEAD 2025-07-08T12:01:23.030354Z node 3 :PQ_READ_PROXY INFO: session cookie 31 consumer userx session userx_3_31_6933299267918733827_v1 is DEAD 2025-07-08T12:01:23.030558Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037905][topic2] pipe [3:7524679984104357937:2774] disconnected; active server actors: 1 2025-07-08T12:01:23.030558Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037902 (partition=1) Received event: NActors::TEvents::TEvPoison 2025-07-08T12:01:23.030566Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037905][topic2] pipe [3:7524679984104357937:2774] client userx disconnected session userx_3_31_6933299267918733827_v1 2025-07-08T12:01:23.030587Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037902] Destroy direct read 
session userx_3_31_6933299267918733827_v1 2025-07-08T12:01:23.030592Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037902] server disconnected, pipe [3:7524679984104357946:2780] destroyed 2025-07-08T12:01:23.030597Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037902] Destroy direct read session userx_3_31_6933299267918733827_v1 2025-07-08T12:01:23.030599Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037902] server disconnected, pipe [3:7524679984104357944:2779] destroyed 2025-07-08T12:01:23.030603Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037902] server disconnected, pipe [3:7524679975514422352:2476] destroyed 2025-07-08T12:01:23.030605Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: userx_3_31_6933299267918733827_v1 2025-07-08T12:01:23.030607Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: userx_3_31_6933299267918733827_v1 2025-07-08T12:01:23.030616Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037902, Partition: 1, State: StateIdle] TPartition::DropOwner. 2025-07-08T12:01:23.030663Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037904] Destroy direct read session userx_3_31_6933299267918733827_v1 2025-07-08T12:01:23.030667Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037903] Destroy direct read session userx_3_31_6933299267918733827_v1 2025-07-08T12:01:23.030675Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037903] server disconnected, pipe [3:7524679984104357943:2778] destroyed 2025-07-08T12:01:23.030681Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037903] Destroy direct read session userx_3_31_6933299267918733827_v1 2025-07-08T12:01:23.030681Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037904] server disconnected, pipe [3:7524679984104357948:2781] destroyed 2025-07-08T12:01:23.030683Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037903] server disconnected, pipe [3:7524679984104357941:2777] destroyed 2025-07-08T12:01:23.030696Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: userx_3_31_6933299267918733827_v1 2025-07-08T12:01:23.030698Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: userx_3_31_6933299267918733827_v1 2025-07-08T12:01:23.030700Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: userx_3_31_6933299267918733827_v1 2025-07-08T12:01:23.117457Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7524679945449648469:2219], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-07-08T12:01:23.117508Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7524679945449648469:2219], cacheItem# { Subscriber: { Subscriber: [3:7524679949744616774:2961] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-07-08T12:01:23.117534Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send 
result: self# [3:7524679984104357963:4765], recipient# [3:7524679984104357962:2783], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-07-08T12:01:23.121631Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7524679941268884490:2102], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-07-08T12:01:23.121686Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7524679941268884490:2102], cacheItem# { Subscriber: { Subscriber: [4:7524679945563851953:2196] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-07-08T12:01:23.121717Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7524679979923591722:3234], recipient# [4:7524679979923591721:2367], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-07-08T12:01:23.122920Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7524679945449648469:2219], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-07-08T12:01:23.122953Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7524679945449648469:2219], cacheItem# { Subscriber: { Subscriber: [3:7524679949744616774:2961] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-07-08T12:01:23.122969Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7524679984104357967:4768], recipient# [3:7524679984104357966:2784], result# { ErrorCount: 1 DatabaseName: /Root 
DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-07-08T12:01:23.130771Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7524679941268884490:2102], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-07-08T12:01:23.130822Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7524679941268884490:2102], cacheItem# { Subscriber: { Subscriber: [4:7524679945563851953:2196] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-07-08T12:01:23.130846Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7524679979923591724:3235], recipient# [4:7524679979923591723:2368], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> KqpImmediateEffects::ForceImmediateEffectsExecution+UseSink [GOOD] >> KqpImmediateEffects::Delete ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ImmediateUpdate [GOOD] Test command err: Trying to start YDB, gRPC: 27853, MsgBus: 62978 2025-07-08T12:01:21.617080Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679971591765246:2238];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:21.617124Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b83/r3tmp/tmprlxP1n/pdisk_1.dat 2025-07-08T12:01:21.680423Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27853, node 1 2025-07-08T12:01:21.707360Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:21.707373Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:21.707375Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:21.707409Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62978 2025-07-08T12:01:21.756017Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-07-08T12:01:21.756048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:62978 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-07-08T12:01:21.757279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T12:01:21.777252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.785630Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.825204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.893632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.961303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.020030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:22.140791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.153726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.166064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.179430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.195100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.252028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.264024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.440657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 19757, MsgBus: 27878 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b83/r3tmp/tmpCbnQ9x/pdisk_1.dat 2025-07-08T12:01:22.904372Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:01:22.907489Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19757, node 2 2025-07-08T12:01:22.919463Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:22.919477Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:22.919479Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:22.919528Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27878 2025-07-08T12:01:22.968392Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:22.968423Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:22.969511Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27878 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:22.980581Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.985381Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.989955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.999272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:23.017177Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:23.029777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:23.233228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.242186Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.250511Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.264994Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.278965Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.293410Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.314200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.473081Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 >> KqpInplaceUpdate::SingleRowSimple+UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_BatchUpdate-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 7061, MsgBus: 62508 2025-07-08T12:01:21.384801Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679974437518749:2145];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b85/r3tmp/tmpDxWy7c/pdisk_1.dat 2025-07-08T12:01:21.432895Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:01:21.456826Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7061, node 1 2025-07-08T12:01:21.472029Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:21.472038Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:21.472040Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:21.472076Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:01:21.487364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:21.487390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:21.488249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to 
server localhost:62508 TClient is connected to server localhost:62508 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T12:01:21.541366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.577873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.642272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.667210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-07-08T12:01:21.686368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.817049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.835024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.843756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.858300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.873411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.886481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.901729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.096116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 64536, MsgBus: 19889 2025-07-08T12:01:22.643082Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679975821874849:2088];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:22.644616Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b85/r3tmp/tmpwyKuph/pdisk_1.dat 2025-07-08T12:01:22.662814Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64536, node 2 2025-07-08T12:01:22.671670Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:22.671681Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:22.671683Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:22.671726Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19889 TClient is connected to server localhost:19889 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:22.741242Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:22.741270Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:22.741443Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.742583Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:22.745511Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:22.759544Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.785718Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.815585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.829005Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:23.164908Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.173682Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.193351Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.253290Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.265027Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.279194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.293379Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.469274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.639474Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyRW1RR2 [GOOD] Test command err: Trying to start YDB, gRPC: 6002, MsgBus: 7334 2025-07-08T12:01:21.860492Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679974143417285:2068];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:21.860520Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b7f/r3tmp/tmpYM06Gl/pdisk_1.dat 2025-07-08T12:01:21.913764Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6002, node 1 2025-07-08T12:01:21.945153Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:21.945167Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:21.945169Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:21.945211Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7334 2025-07-08T12:01:21.962885Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:21.962918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-07-08T12:01:21.964029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7334 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:22.004213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.006483Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:22.049609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.116204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.179083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.190998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:22.266993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.284279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.292513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.311332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.325946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.342220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.357432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.573368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 61181, MsgBus: 20882 2025-07-08T12:01:23.010432Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679983725293478:2070];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:23.010481Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b7f/r3tmp/tmpFadcma/pdisk_1.dat 2025-07-08T12:01:23.024646Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61181, node 2 2025-07-08T12:01:23.031888Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:23.031901Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:23.031903Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:23.031948Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20882 TClient is connected to server localhost:20882 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:23.110761Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:23.110788Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:23.111882Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:23.113588Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:23.119144Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:23.129622Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:23.145520Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:23.156033Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:23.365140Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.375443Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.383344Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.438860Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.446051Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.461673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.475036Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.615968Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Params_Duplicates-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 1937, MsgBus: 64937 2025-07-08T12:01:20.854683Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679967989057763:2072];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:20.854700Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b9e/r3tmp/tmptSLS6W/pdisk_1.dat 2025-07-08T12:01:20.911988Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1937, node 1 2025-07-08T12:01:20.937110Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:20.937124Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:20.937126Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:20.937160Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64937 2025-07-08T12:01:20.955432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:20.955457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:20.956526Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is 
connected to server localhost:64937 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:21.023075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.032791Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:21.044819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.129406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.193814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.212875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:21.291364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.298411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.311910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.369817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.381330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.402968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:21.419339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9294, MsgBus: 28677 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b9e/r3tmp/tmpVaqT25/pdisk_1.dat TServer::EnableGrpc on GrpcPort 9294, node 2 2025-07-08T12:01:22.132509Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:22.136248Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:22.136262Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:22.136264Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:22.136307Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28677 TClient is connected to server localhost:28677 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-07-08T12:01:22.215043Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:22.215064Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:22.215355Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.216363Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:22.219451Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:22.287745Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.323590Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.394210Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.428225Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.632428Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.669613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.685398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.701165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.715316Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.733330Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.746727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.964704Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=3; 2025-07-08T12:01:22.966183Z node 2 :TX_DATASHARD ERROR: Prepare transaction 
failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-07-08T12:01:22.966233Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-07-08T12:01:22.966306Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7524679979577799651:2455], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [2:7524679979577799630:2455]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[2:7524679979577799651:2455].{
: Error: Conflict with existing key., code: 2012 } 2025-07-08T12:01:22.966413Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7524679979577799644:2455], SessionActorId: [2:7524679979577799630:2455], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[2:7524679979577799630:2455]. isRollback=0 2025-07-08T12:01:22.966473Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDAyNzYzOWMtNTgwZjcyNjAtYzVhZWJjM2YtM2M0MTg1Zjg=, ActorId: [2:7524679979577799630:2455], ActorState: ExecuteState, TraceId: 01jzmyjzg67sd0d7nwgf8ysjhc, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7524679979577799645:2455] from: [2:7524679979577799644:2455] 2025-07-08T12:01:22.966560Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7524679979577799645:2455] TxId: 281474976715670. Ctx: { TraceId: 01jzmyjzg67sd0d7nwgf8ysjhc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDAyNzYzOWMtNTgwZjcyNjAtYzVhZWJjM2YtM2M0MTg1Zjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } 2025-07-08T12:01:22.966619Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDAyNzYzOWMtNTgwZjcyNjAtYzVhZWJjM2YtM2M0MTg1Zjg=, ActorId: [2:7524679979577799630:2455], ActorState: ExecuteState, TraceId: 01jzmyjzg67sd0d7nwgf8ysjhc, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 25451, MsgBus: 61891 2025-07-08T12:01:23.226770Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7524679983010918541:2071];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:23.226787Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b9e/r3tmp/tmp0eOJRa/pdisk_1.dat 2025-07-08T12:01:23.241794Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25451, node 3 2025-07-08T12:01:23.255242Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:23.255255Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:23.255268Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:23.255307Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61891 TClient is connected to server localhost:61891 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:23.293189Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:23.294874Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-07-08T12:01:23.312470Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
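The KIKIMR_CONSTRAINT_VIOLATION / PRECONDITION_FAILED entries above are the outcome these insert-effect tests are designed to provoke, not infrastructure failures: a blind INSERT into a primary key that already exists is rejected with issue code 2012 and the transaction is aborted. A minimal YQL sketch of that pattern, assuming the `/Root/TwoShard` table from the log already holds a row with Key = 1u (column names are taken from the usual KQP test fixture and the values are illustrative, not the test's actual query):

    -- Assumption: /Root/TwoShard already contains a row with Key = 1u
    INSERT INTO `/Root/TwoShard` (Key, Value1, Value2) VALUES (1u, "One", -1);
    -- Expected failure, matching the log above:
    --   Constraint violated. Table: `/Root/TwoShard`., code: 2012
    --   Conflict with existing key., code: 2012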
2025-07-08T12:01:23.326904Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:23.326933Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:23.328129Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:23.368256Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:23.385291Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:23.401728Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.551621Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.559083Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.572497Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.586576Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.600036Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.614778Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.674512Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.831977Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7524679983010921007:2455], TxId: 281474976710671, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jzmyk0ap5znrjjx34fqn62wa. SessionId : ydb://session/3?node_id=3&id=ZGQxYjY2YjktYWIwZmI5NzYtMWU1MzIwOWMtNDQwMWZmMDY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : . }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-07-08T12:01:23.832048Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7524679983010921009:2456], TxId: 281474976710671, task: 2. Ctx: { SessionId : ydb://session/3?node_id=3&id=ZGQxYjY2YjktYWIwZmI5NzYtMWU1MzIwOWMtNDQwMWZmMDY=. CustomerSuppliedId : . TraceId : 01jzmyk0ap5znrjjx34fqn62wa. CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. Handle abort execution event from: [3:7524679983010921004:2446], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-07-08T12:01:23.832106Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZGQxYjY2YjktYWIwZmI5NzYtMWU1MzIwOWMtNDQwMWZmMDY=, ActorId: [3:7524679983010920978:2446], ActorState: ExecuteState, TraceId: 01jzmyk0ap5znrjjx34fqn62wa, Create QueryResponse for error on request, msg: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::TxWithWriteAtTheEnd+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 17984, MsgBus: 31724 2025-07-08T12:01:21.868698Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679972493944492:2230];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:21.868751Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b82/r3tmp/tmpIVISvH/pdisk_1.dat 2025-07-08T12:01:21.926748Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17984, node 1 2025-07-08T12:01:21.947641Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:21.947656Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:21.947657Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:21.947705Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31724 TClient is connected to server localhost:31724 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-07-08T12:01:22.001399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:22.001426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:22.002694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:22.027565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
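The "Duplicated keys found., code: 2012" error a few entries above is the other abort path exercised here: it is raised when a single INSERT supplies more than one row with the same primary key, so the statement is rejected before any effect is applied. A hedged YQL sketch under the same assumptions about `/Root/TwoShard` (the real test feeds the rows through declared parameters, but the failure mode is the same):

    -- Both rows target Key = 1u, so the INSERT payload itself is invalid
    INSERT INTO `/Root/TwoShard` (Key, Value1, Value2) VALUES
        (1u, "A", 0),
        (1u, "B", 1);
    -- Expected failure: Duplicated keys found., code: 2012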
2025-07-08T12:01:22.032093Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:22.054078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.080338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.111234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.139699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.269449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.279613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.296393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.306670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.320708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.335499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.352472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.585648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15791, MsgBus: 28823 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b82/r3tmp/tmpL40hKD/pdisk_1.dat 2025-07-08T12:01:23.079232Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:01:23.080720Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15791, node 2 2025-07-08T12:01:23.091464Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-07-08T12:01:23.091474Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:23.091476Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:23.091518Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28823 TClient is connected to server localhost:28823 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:23.174015Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:23.174046Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:23.174342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:23.176499Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:23.195394Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:23.206226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:23.225368Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-07-08T12:01:23.238424Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.452362Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.462773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.474329Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.482875Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.496205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.509415Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.526230Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.691899Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 >> KqpImmediateEffects::ConflictingKeyW1RR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1RWR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1WRR2 >> KqpImmediateEffects::InsertExistingKey-UseSink >> TOlapReboots::CreateMultipleTables [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ImmediateUpdateSelect [GOOD] Test command err: Trying to start YDB, gRPC: 12387, MsgBus: 4822 2025-07-08T12:01:21.734333Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679974269971941:2070];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:21.734351Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b81/r3tmp/tmpKaRric/pdisk_1.dat 2025-07-08T12:01:21.794618Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12387, node 1 2025-07-08T12:01:21.817162Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:21.817178Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:21.817180Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:21.817223Z node 1 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:4822 TClient is connected to server localhost:4822 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-07-08T12:01:21.868843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:21.868868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:21.869744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:21.876760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:21.884131Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:21.930035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:21.997014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.019501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.036508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:22.153860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.166345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.181945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.194265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.210431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.222137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.238195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.436360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.737073Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Trying to start YDB, gRPC: 10674, MsgBus: 28554 2025-07-08T12:01:23.165133Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679982466643532:2069];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:23.165152Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b81/r3tmp/tmpDGNS4g/pdisk_1.dat 2025-07-08T12:01:23.180388Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10674, node 2 2025-07-08T12:01:23.187794Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:23.187806Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:23.187808Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:23.187860Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28554 TClient is connected to server localhost:28554 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:23.269145Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:23.269176Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2025-07-08T12:01:23.269478Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.269990Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:23.275495Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:23.289206Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-07-08T12:01:23.290329Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:23.323118Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:23.338950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.488197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.499864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.509667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.524080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.541456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.554697Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.568658Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.774601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ForceImmediateEffectsExecution+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 9281, MsgBus: 28898 2025-07-08T12:01:22.028250Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679976798335601:2138];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:22.028720Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b7c/r3tmp/tmpOpmkaF/pdisk_1.dat 2025-07-08T12:01:22.084605Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9281, node 1 2025-07-08T12:01:22.107766Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:22.107777Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:22.107779Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:22.107813Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28898 2025-07-08T12:01:22.165332Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:22.165359Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:22.166374Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28898 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:22.197586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.200015Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:22.207578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.229295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.258184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-07-08T12:01:22.273901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.353994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.377846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.400373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.415601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.427033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.440567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.453456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.659172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.676754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.696367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.027072Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Trying to start YDB, gRPC: 30469, MsgBus: 28954 2025-07-08T12:01:23.502613Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679981024482003:2069];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:23.502647Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b7c/r3tmp/tmpiHqLUG/pdisk_1.dat 2025-07-08T12:01:23.520334Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30469, node 2 2025-07-08T12:01:23.528227Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:23.528252Z node 2 :NET_CLASSIFIER WARN: will try to initialize from 
file: (empty maybe) 2025-07-08T12:01:23.528254Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:23.528294Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28954 TClient is connected to server localhost:28954 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:23.605523Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:23.605549Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:23.605869Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:23.606574Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:23.615123Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:23.630800Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:23.650177Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:23.668458Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:23.828585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.839633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.853177Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.868471Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.880516Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.895065Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.908920Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.057351Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 >> KqpEffects::InsertAbort_Literal_Conflict+UseSink >> KqpInplaceUpdate::SingleRowStr-UseSink >> KqpImmediateEffects::ConflictingKeyW1RWR2 [GOOD] >> KqpImmediateEffects::UpsertAfterInsert >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd+UseSink [GOOD] >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd-UseSink >> KqpImmediateEffects::UpsertDuplicates >> KqpEffects::DeletePkPrefixWithIndex ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyRW1RWR2 [GOOD] Test command err: Trying to start YDB, gRPC: 13800, MsgBus: 7812 2025-07-08T12:01:22.947921Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679978605411773:2148];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:22.948610Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b7b/r3tmp/tmp8MC3lQ/pdisk_1.dat 2025-07-08T12:01:22.995482Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13800, node 1 2025-07-08T12:01:23.016005Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:23.016017Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:23.016019Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:23.016067Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7812 
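For context, the KqpImmediateEffects and KqpEffects cases named above (UpsertAfterInsert, InsertAbort_Literal_Conflict, the ConflictingKey* variants) all drive YQL DML against a small key/value table. A minimal YQL sketch of the statement shapes involved follows; the table name, column types and literals are illustrative assumptions, not taken from this run, and in YDB the CREATE TABLE would be executed as a separate scheme query from the DML.

-- Hypothetical table; the real schemas live in the unittest fixtures.
CREATE TABLE TestTable (
    Key Uint64,
    Value Utf8,
    PRIMARY KEY (Key)
);

-- UPSERT writes the row whether or not the key already exists (blind write, no conflict).
UPSERT INTO TestTable (Key, Value) VALUES (1u, "first"u);

-- Plain INSERT expects the key to be absent; repeating it with the same key is the
-- duplicate/conflict situation the *Conflict and InsertExistingKey tests exercise.
INSERT INTO TestTable (Key, Value) VALUES (1u, "second"u);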
2025-07-08T12:01:23.047729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:23.047754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:23.048864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7812 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:23.064328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:23.072826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:23.086775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:23.103465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:23.113875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:23.283103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.290003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.299202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.306691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.320590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.335081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.348562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.513654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28624, MsgBus: 28323 2025-07-08T12:01:23.811788Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679981979955848:2067];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:23.811831Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b7b/r3tmp/tmp59axc9/pdisk_1.dat 2025-07-08T12:01:23.829297Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28624, node 2 2025-07-08T12:01:23.840262Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:23.840272Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:23.840274Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:23.840310Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28323 TClient is connected to server localhost:28323 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:23.916528Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:23.916552Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:23.916967Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.917816Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:23.921388Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:23.928921Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:23.944445Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:23.968852Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:23.986217Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:24.143450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.152616Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.161506Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.177685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.190778Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.203336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.217336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.389230Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.516671Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmJhNjlmODItODNiZDIwMDktMzIwNjFlM2EtNmQxMzlhMzM=, ActorId: [2:7524679986274925577:2448], ActorState: ExecuteState, TraceId: 01jzmyk10qcw0yxr81fjp8sfkw, Create QueryResponse for error on request, msg: >> KqpImmediateEffects::InsertDuplicates-UseSink >> KqpEffects::InsertRevert_Literal_Conflict [GOOD] >> TSchemeShardServerLessReboots::TestServerlessComputeResourcesModeWithReboots [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyW1RWR2 [GOOD] Test command err: Trying to start YDB, gRPC: 6634, MsgBus: 1371 2025-07-08T12:01:22.736608Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679979511856129:2218];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:22.745411Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b7a/r3tmp/tmprXuiD5/pdisk_1.dat 2025-07-08T12:01:22.826443Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6634, node 1 2025-07-08T12:01:22.861149Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:22.861162Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:22.861164Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:22.861205Z node 1 :NET_CLASSIFIER ERROR: got bad 
distributable configuration 2025-07-08T12:01:22.894109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:22.894139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:1371 2025-07-08T12:01:22.895211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1371 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:22.973796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.978084Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.990284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.050863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:23.108945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:23.118987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:23.190625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.199573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.208221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.223162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.236932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.250511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.265204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.409934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 13083, MsgBus: 9389 2025-07-08T12:01:23.852847Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679983566597489:2084];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:23.854486Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b7a/r3tmp/tmps9nbpv/pdisk_1.dat 2025-07-08T12:01:23.892969Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13083, node 2 2025-07-08T12:01:23.923657Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:23.923672Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:23.923674Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:23.923726Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9389 2025-07-08T12:01:23.965128Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:23.965160Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:23.966654Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9389 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:23.983073Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:23.986821Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:23.999040Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:24.024477Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:24.047403Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:24.059074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:24.221342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.230277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.238904Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.253288Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.265703Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.279582Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.293573Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.502670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.631301Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Yjk1NTZmMGEtZjJkYWM0N2EtODAxNDY0N2MtMmM5ZGVkN2I=, ActorId: [2:7524679987861567199:2448], ActorState: ExecuteState, TraceId: 01jzmyk146csk8cwa72gx4xr24, Create QueryResponse for error on request, msg: >> KqpImmediateEffects::Delete [GOOD] >> KqpImmediateEffects::ConflictingKeyW1WRR2 >> KqpInplaceUpdate::SingleRowSimple+UseSink [GOOD] >> KqpInplaceUpdate::SingleRowPgNotNull-UseSink >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::CreateMultipleTables [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:127:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:132:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:137:2058] recipient: [1:111:2143] 2025-07-08T12:00:38.411488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 
600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:00:38.411506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:38.411512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:00:38.411516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:00:38.411521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:00:38.411524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:00:38.411532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:00:38.411549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:00:38.411612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:38.424261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T12:00:38.424288Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:177:2058] recipient: [1:15:2062] 2025-07-08T12:00:38.429300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:38.429346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:00:38.429379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:00:38.431035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:00:38.431130Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:00:38.431205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:38.431242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:00:38.431558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:38.431590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:00:38.431744Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:38.431749Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:38.431763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:00:38.431769Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:38.431774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:00:38.431797Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-07-08T12:00:38.433788Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T12:00:38.451335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:00:38.451388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:38.451434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:00:38.451469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:00:38.451478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:38.452002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:38.452025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:00:38.452053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:38.452062Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:00:38.452066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:00:38.452071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:00:38.452412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:38.452423Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:00:38.452427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:00:38.452716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:38.452726Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:38.452731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:38.452738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 
ready parts: 1/1 2025-07-08T12:00:38.453297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:00:38.453655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:00:38.453689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:00:38.453859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:00:38.453884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:00:38.453891Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:38.454034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:00:38.454043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:00:38.454066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:00:38.454078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:00:38.454440Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:00:38.454448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:00:38.454475Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:00:38.454480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:00:38.454529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:00:38.454535Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:00:38.454545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:38.454549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:38.454554Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:00:38.454556Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:00:38.454560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:00:38.454565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TO ... Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 1003 2025-07-08T12:01:24.497977Z node 89 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-07-08T12:01:24.497982Z node 89 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2025-07-08T12:01:24.497988Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-07-08T12:01:24.498104Z node 89 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-07-08T12:01:24.498117Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-07-08T12:01:24.498120Z node 89 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-07-08T12:01:24.498124Z node 89 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 4 2025-07-08T12:01:24.498128Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-07-08T12:01:24.498140Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2025-07-08T12:01:24.498840Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1003:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-07-08T12:01:24.498878Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1003:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:5 msg type: 268697639 2025-07-08T12:01:24.498897Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1003, partId: 0, tablet: 72057594037968897 2025-07-08T12:01:24.498988Z node 89 :HIVE INFO: [72057594037968897] TEvUpdateTabletsObject, msg: ObjectId: 7726343884038809171 TabletIds: 72075186233409546 TxId: 1003 TxPartId: 0 2025-07-08T12:01:24.499019Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Update tablets object reply, message: Status: OK TxId: 1003 TxPartId: 0, at schemeshard: 72057594046678944 2025-07-08T12:01:24.499031Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Status: OK TxId: 1003 TxPartId: 0 2025-07-08T12:01:24.499989Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-07-08T12:01:24.500147Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-07-08T12:01:24.500350Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 
2025-07-08T12:01:24.511316Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1003 2025-07-08T12:01:24.511340Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-07-08T12:01:24.511364Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1003 FAKE_COORDINATOR: Erasing txId 1003 2025-07-08T12:01:24.511875Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-07-08T12:01:24.511917Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-07-08T12:01:24.511926Z node 89 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-07-08T12:01:24.511946Z node 89 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-07-08T12:01:24.511954Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-07-08T12:01:24.511960Z node 89 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-07-08T12:01:24.511963Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-07-08T12:01:24.511969Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-07-08T12:01:24.511982Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [89:362:2340] message: TxId: 1003 2025-07-08T12:01:24.511990Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-07-08T12:01:24.511996Z node 89 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2025-07-08T12:01:24.512000Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2025-07-08T12:01:24.512031Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-07-08T12:01:24.512579Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-07-08T12:01:24.512591Z node 89 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [89:435:2406] TestWaitNotification: OK eventTxId 1003 2025-07-08T12:01:24.512722Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:01:24.512805Z node 89 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable1" took 92us result status StatusSuccess 2025-07-08T12:01:24.512930Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/ColumnTable1" PathDescription { Self { Name: "ColumnTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 1004 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable1" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 3 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:24.513110Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:01:24.513146Z node 89 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable2" took 37us result status StatusSuccess 2025-07-08T12:01:24.513204Z node 89 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/ColumnTable2" PathDescription { Self { Name: "ColumnTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 
0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "ColumnTable2" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 3 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpImmediateEffects::InsertExistingKey-UseSink [GOOD] >> KqpImmediateEffects::Interactive >> KqpEffects::InsertAbort_Literal_Conflict+UseSink [GOOD] >> KqpEffects::EmptyUpdate+UseSink >> KqpInplaceUpdate::SingleRowStr-UseSink [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1WRR2 [GOOD] >> KqpWrite::CastValues ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertRevert_Literal_Conflict [GOOD] Test command err: Trying to start YDB, gRPC: 6200, MsgBus: 24276 2025-07-08T12:01:21.985945Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679973058364879:2239];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:21.988924Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b7e/r3tmp/tmpjVGpQX/pdisk_1.dat 2025-07-08T12:01:22.067767Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6200, node 1 2025-07-08T12:01:22.084045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:22.084067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:22.090659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:22.102691Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:22.102699Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:22.102701Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:22.102734Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24276 TClient is connected to server localhost:24276 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:22.173676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.176336Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:22.201602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.237556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.266294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:22.291155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:22.409661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.426019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.445547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.460315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.481328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.497556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.511655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:22.697002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 23131, MsgBus: 61323 2025-07-08T12:01:23.166148Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679980553523115:2191];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:23.166890Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b7e/r3tmp/tmpEc8btC/pdisk_1.dat 2025-07-08T12:01:23.184973Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23131, node 2 2025-07-08T12:01:23.197599Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:23.197615Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:23.197616Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:23.197660Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61323 TClient is connected to server localhost:61323 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:23.268377Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:23.268405Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:23.268678Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:23.269496Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:23.280055Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:23.288978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:23.311985Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:23.323078Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:23.542726Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.552818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.565788Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.579618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.596331Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.650587Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.665085Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.807414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.899600Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7524679980553525565:2472], TxId: 281474976715674, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=OTdkNzJjZGYtYTlhMzNlYTEtMTUwNjQzYjItOWE3ZGU0MWU=. CustomerSuppliedId : . TraceId : 01jzmyk0c5bnemvdxh8xdh9v8n. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : . }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-07-08T12:01:23.899695Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7524679980553525567:2473], TxId: 281474976715674, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OTdkNzJjZGYtYTlhMzNlYTEtMTUwNjQzYjItOWE3ZGU0MWU=. TraceId : 01jzmyk0c5bnemvdxh8xdh9v8n. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : . }. Handle abort execution event from: [2:7524679980553525562:2446], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-07-08T12:01:23.899760Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTdkNzJjZGYtYTlhMzNlYTEtMTUwNjQzYjItOWE3ZGU0MWU=, ActorId: [2:7524679980553525406:2446], ActorState: ExecuteState, TraceId: 01jzmyk0c5bnemvdxh8xdh9v8n, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 2092, MsgBus: 17769 2025-07-08T12:01:24.306144Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7524679986600444793:2074];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:24.307330Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b7e/r3tmp/tmp9fp8EC/pdisk_1.dat 2025-07-08T12:01:24.319877Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2092, node 3 2025-07-08T12:01:24.331968Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:24.331979Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:24.331981Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:24.332025Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17769 TClient is connected to server localhost:17769 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:24.411375Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:24.411406Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:24.411813Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.412383Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:24.420768Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:24.425428Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
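The PRECONDITION_FAILED / KIKIMR_CONSTRAINT_VIOLATION above ("Duplicated keys found., code: 2012") is the class of failure the KqpEffects insert-conflict tests provoke on purpose. A minimal hedged YQL sketch that triggers the same kind of constraint violation; the table name and values are hypothetical, not the fixture data used by the suite:

    -- Hypothetical table; the test suite uses its own fixture tables.
    CREATE TABLE `/Root/TestTable` (
        Key Uint32,
        Value String,
        PRIMARY KEY (Key)
    );
    -- INSERT enforces primary-key uniqueness: repeating a key within one statement
    -- (or inserting a key that already exists) aborts the transaction with a
    -- constraint violation like the "Duplicated keys found" error logged above.
    INSERT INTO `/Root/TestTable` (Key, Value) VALUES
        (1u, "first"),
        (1u, "second");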
2025-07-08T12:01:24.438430Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:24.465826Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:24.484394Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:24.660081Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.671544Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.682314Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.693435Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.707101Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.721617Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.737058Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 >> KqpImmediateEffects::UpsertDuplicates [GOOD] >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted >> TOlapReboots::CreateDropStandaloneTable [GOOD] >> TOlapReboots::AlterTtlSettings ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC [GOOD] Test command err: 2025-07-08T12:01:11.973498Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679928530011020:2232];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:11.973646Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:01:11.982265Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679930165577648:2163];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00166f/r3tmp/tmpJASmA3/pdisk_1.dat 2025-07-08T12:01:12.030562Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T12:01:12.031447Z node 2 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:01:12.029290Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T12:01:12.079666Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:12.083096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:12.083120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:12.084899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26363, node 1 2025-07-08T12:01:12.130088Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/43nv/00166f/r3tmp/yandexzr5jPT.tmp 2025-07-08T12:01:12.130101Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/43nv/00166f/r3tmp/yandexzr5jPT.tmp 2025-07-08T12:01:12.130159Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/43nv/00166f/r3tmp/yandexzr5jPT.tmp 2025-07-08T12:01:12.130215Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:01:12.131983Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:12.131997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:12.134085Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-07-08T12:01:12.134506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:12.136879Z INFO: TTestServer started on Port 5962 GrpcPort 26363 TClient is connected to server localhost:5962 PQClient connected to localhost:26363 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:12.186028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-07-08T12:01:12.265678Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976720658, at schemeshard: 72057594046644480 2025-07-08T12:01:12.267224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-07-08T12:01:12.608931Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7524679932824979131:2293], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T12:01:12.609663Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YThjNDgxYjEtNmZhZDk0NjUtNjFmYzA5NzctZDE2Nzc2ZTY=, ActorId: [1:7524679932824979128:2291], ActorState: ExecuteState, TraceId: 01jzmyjnb9d1bswze78e639xr2, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T12:01:12.610145Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T12:01:12.610630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.638475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:12.738644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-07-08T12:01:12.781081Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720665. Ctx: { TraceId: 01jzmyjnhr9f7c1svtazfn911k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjNlYTU0Y2QtMTVhNzJmNmQtMWNmN2U2YjUtZDAyMzczMmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root === CheckClustersList. 
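The SCHEME_ERROR above appears to be expected bootstrap noise: the PersQueue cluster tracker issues its query before the /Root/PQ/Config/V2 tables are created, and reading a missing path fails query compilation in exactly this way; once the tables exist, the "=== Init DC" UPSERT and the later "CheckClustersList. Ok" step succeed. A hedged sketch of the failing pattern:

    -- Reading a path that does not exist yet fails at compile time with the error
    -- shown above ("Cannot find table ... because it does not exist or you do not
    -- have access permissions").
    SELECT * FROM `/Root/PQ/Config/V2/Cluster`;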
Subcribe to ClusterTracker from [1:7524679932824979549:2967] 2025-07-08T12:01:12.973246Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:01:12.981843Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:01:16.973163Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7524679928530011020:2232];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:16.973191Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-07-08T12:01:16.984993Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7524679930165577648:2163];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:16.985442Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1751976072259 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "PQ" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976720658 CreateStep: 1751976072308 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: ".sys" PathId: 1844674... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-07-08T12:01:17.845729Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7524679932824978464:2167], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-07-08T12:01:17.845829Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7524679932824978464:2167], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 } 2025-07-08T12:01:17.845852Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7524679932824978464:2167], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7524679932824978865:2446] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 11 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1751976072259 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-07-08T12:01:17.845868Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7524679932824978464:2167], cacheItem# { Subscriber: { Subscriber: [1:7524679932824978865:2446] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 11 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1751976072259 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 11 IsSync: true Partial: 0 } 2025-07-08T12:01:17.845957Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7524679954299816429:3219], recipient# [1:7524679954299816428:3218], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root ... maxSize 661 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 4 ClientCommitOffset 0 committedOffset 0 Guid ab482d35-badf4768-711af000-7a695d40 2025-07-08T12:01:25.210814Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic2' requestId: 2025-07-08T12:01:25.210825Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037904] got client message batch for topic 'account2/topic2' partition 0 2025-07-08T12:01:25.210860Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037904, Partition: 0, State: StateIdle] read cookie 3 Topic 'account2/topic2' partition 0 user user1 offset 0 count 4 size 661 endOffset 4 max time lag 0ms effective offset 0 2025-07-08T12:01:25.210863Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037904, Partition: 0, State: StateIdle] read cookie 3 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 4 2025-07-08T12:01:25.210891Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037904, Partition: 0, State: StateIdle] Reading cookie 3. All data is from uncompacted head. 
2025-07-08T12:01:25.210895Z node 4 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-07-08T12:01:25.210927Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'topic2' partition: 0 messageNo: 0 requestId: cookie: 0 2025-07-08T12:01:25.211124Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer user1 session user1_3_2_6944381216716582673_v1 grpc read done: success# 1, data# { start_read { topic { path: "account2/topic2" } partition: 3 assign_id: 2 } } 2025-07-08T12:01:25.211186Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer user1 session user1_3_2_6944381216716582673_v1 got StartRead from client: partition# TopicId: Topic /Root/account2/topic2 in database: Root, partition 3(assignId:2), readOffset# 0, commitOffset# 0 2025-07-08T12:01:25.211254Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer user1 session user1_3_2_6944381216716582673_v1 TopicId: Topic /Root/account2/topic2 in database: Root, partition 0(assignId:5) initDone 1 event { CmdReadResult { MaxOffset: 4 Result { Offset: 0 Data: "... 94 bytes ..." SourceId: "\000123" SeqNo: 1 WriteTimestampMS: 1751976085081 CreateTimestampMS: 1751976085073 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 94 bytes ..." SourceId: "\000123" SeqNo: 2 WriteTimestampMS: 1751976085083 CreateTimestampMS: 1751976085073 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 94 bytes ..." SourceId: "\000123" SeqNo: 3 WriteTimestampMS: 1751976085083 CreateTimestampMS: 1751976085073 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 94 bytes ..." SourceId: "\000123" SeqNo: 4 WriteTimestampMS: 1751976085083 CreateTimestampMS: 1751976085073 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 19 RealReadOffset: 3 WaitQuotaTimeMs: 0 EndOffset: 4 StartOffset: 0 } Cookie: 0 } 2025-07-08T12:01:25.211284Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer user1 session user1_3_2_6944381216716582673_v1 TopicId: Topic /Root/account2/topic2 in database: Root, partition 0(assignId:5) wait data in partition inited, cookie 1 from offset4 2025-07-08T12:01:25.211292Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer user1 session user1_3_2_6944381216716582673_v1 after read state TopicId: Topic /Root/account2/topic2 in database: Root, partition 0(assignId:5) EndOffset 4 ReadOffset 4 ReadGuid ab482d35-badf4768-711af000-7a695d40 has messages 1 2025-07-08T12:01:25.211314Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer user1 session user1_3_2_6944381216716582673_v1 Start reading TopicId: Topic /Root/account2/topic2 in database: Root, partition 3(assignId:2) EndOffset 0 readOffset 0 committedOffset 0 clientCommitOffset 0 clientReadOffset 0 2025-07-08T12:01:25.211316Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer user1 session user1_3_2_6944381216716582673_v1 TopicId: Topic /Root/account2/topic2 in database: Root, partition 3(assignId:2) wait data in partition inited, cookie 1 from offset0 2025-07-08T12:01:25.211331Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer user1 session user1_3_2_6944381216716582673_v1 read done: guid# ab482d35-badf4768-711af000-7a695d40, partition# TopicId: Topic /Root/account2/topic2 in database: Root, partition 0(assignId:5), size# 416 2025-07-08T12:01:25.211338Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer user1 session user1_3_2_6944381216716582673_v1 response to read: guid# ab482d35-badf4768-711af000-7a695d40 2025-07-08T12:01:25.211398Z node 3 
:PQ_READ_PROXY DEBUG: session cookie 2 consumer user1 session user1_3_2_6944381216716582673_v1 Process answer. Aval parts: 0 2025-07-08T12:01:25.211651Z :DEBUG: [/Root] [/Root] [5bb813c9-e5797a5f-f44c5f4d-45657eb1] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-07-08T12:01:25.211761Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (0-3) 2025-07-08T12:01:25.212178Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-07-08T12:01:25.212191Z :DEBUG: [/Root] [/Root] [5bb813c9-e5797a5f-f44c5f4d-45657eb1] [null] The application data is transferred to the client. Number of messages 1, size 10 bytes GOT MESSAGE: DataReceived { PartitionStreamId: 4 PartitionId: 0 Message { Data: ..10 bytes.. Partition stream id: 4 Cluster: "". Topic: "account2/topic2" Partition: 0 PartitionKey: "" Information: { Offset: 0 SeqNo: 1 MessageGroupId: "123" CreateTime: 2025-07-08T12:01:25.073000Z WriteTime: 2025-07-08T12:01:25.081000Z Ip: "ipv6:[::1]:39174" UncompressedSize: 10 Meta: { "logtype": "unknown", "ident": "unknown", "server": "ipv6:[::1]:39174" } } } } 2025-07-08T12:01:25.212219Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-07-08T12:01:25.212222Z :DEBUG: [/Root] [/Root] [5bb813c9-e5797a5f-f44c5f4d-45657eb1] [null] The application data is transferred to the client. Number of messages 1, size 10 bytes GOT MESSAGE: DataReceived { PartitionStreamId: 4 PartitionId: 0 Message { Data: ..10 bytes.. Partition stream id: 4 Cluster: "". Topic: "account2/topic2" Partition: 0 PartitionKey: "" Information: { Offset: 1 SeqNo: 2 MessageGroupId: "123" CreateTime: 2025-07-08T12:01:25.073000Z WriteTime: 2025-07-08T12:01:25.083000Z Ip: "ipv6:[::1]:39174" UncompressedSize: 10 Meta: { "logtype": "unknown", "ident": "unknown", "server": "ipv6:[::1]:39174" } } } } 2025-07-08T12:01:25.212230Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 1} (2-2) 2025-07-08T12:01:25.212233Z :DEBUG: [/Root] [/Root] [5bb813c9-e5797a5f-f44c5f4d-45657eb1] [null] The application data is transferred to the client. Number of messages 1, size 10 bytes GOT MESSAGE: DataReceived { PartitionStreamId: 4 PartitionId: 0 Message { Data: ..10 bytes.. Partition stream id: 4 Cluster: "". Topic: "account2/topic2" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "123" CreateTime: 2025-07-08T12:01:25.073000Z WriteTime: 2025-07-08T12:01:25.083000Z Ip: "ipv6:[::1]:39174" UncompressedSize: 10 Meta: { "logtype": "unknown", "ident": "unknown", "server": "ipv6:[::1]:39174" } } } } 2025-07-08T12:01:25.212241Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 2} (3-3) 2025-07-08T12:01:25.212245Z :DEBUG: [/Root] [/Root] [5bb813c9-e5797a5f-f44c5f4d-45657eb1] [null] The application data is transferred to the client. Number of messages 1, size 10 bytes GOT MESSAGE: DataReceived { PartitionStreamId: 4 PartitionId: 0 Message { Data: ..10 bytes.. Partition stream id: 4 Cluster: "". Topic: "account2/topic2" Partition: 0 PartitionKey: "" Information: { Offset: 3 SeqNo: 4 MessageGroupId: "123" CreateTime: 2025-07-08T12:01:25.073000Z WriteTime: 2025-07-08T12:01:25.083000Z Ip: "ipv6:[::1]:39174" UncompressedSize: 10 Meta: { "logtype": "unknown", "ident": "unknown", "server": "ipv6:[::1]:39174" } } } } 2025-07-08T12:01:25.212263Z :INFO: [/Root] [/Root] [5bb813c9-e5797a5f-f44c5f4d-45657eb1] Closing read session. 
Close timeout: 0.000000s 2025-07-08T12:01:25.212273Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:account2/topic2:3:5:0:0 null:account2/topic2:0:4:3:0 null:account2/topic2:4:3:0:0 null:account2/topic2:1:2:0:0 null:account2/topic2:2:1:0:0 2025-07-08T12:01:25.212278Z :INFO: [/Root] [/Root] [5bb813c9-e5797a5f-f44c5f4d-45657eb1] Counters: { Errors: 0 CurrentSessionLifetimeMs: 14 BytesRead: 40 MessagesRead: 4 BytesReadCompressed: 92 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-07-08T12:01:25.212295Z :NOTICE: [/Root] [/Root] [5bb813c9-e5797a5f-f44c5f4d-45657eb1] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-07-08T12:01:25.212301Z :DEBUG: [/Root] [/Root] [5bb813c9-e5797a5f-f44c5f4d-45657eb1] [null] Abort session to cluster 2025-07-08T12:01:25.212406Z :NOTICE: [/Root] [/Root] [5bb813c9-e5797a5f-f44c5f4d-45657eb1] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-07-08T12:01:25.212794Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer user1 session user1_3_2_6944381216716582673_v1 grpc read done: success# 1, data# { read { } } 2025-07-08T12:01:25.212807Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer user1 session user1_3_2_6944381216716582673_v1 grpc closed 2025-07-08T12:01:25.212818Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer user1 session user1_3_2_6944381216716582673_v1 is DEAD 2025-07-08T12:01:25.213363Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037903] Destroy direct read session user1_3_2_6944381216716582673_v1 2025-07-08T12:01:25.213374Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037903] server disconnected, pipe [3:7524679991361325596:2488] destroyed 2025-07-08T12:01:25.213377Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037903] Destroy direct read session user1_3_2_6944381216716582673_v1 2025-07-08T12:01:25.213379Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037903] server disconnected, pipe [3:7524679991361325600:2487] destroyed 2025-07-08T12:01:25.213382Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037904] Destroy direct read session user1_3_2_6944381216716582673_v1 2025-07-08T12:01:25.213384Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037904] server disconnected, pipe [3:7524679991361325599:2491] destroyed 2025-07-08T12:01:25.213396Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: user1_3_2_6944381216716582673_v1 2025-07-08T12:01:25.213399Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: user1_3_2_6944381216716582673_v1 2025-07-08T12:01:25.213400Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: user1_3_2_6944381216716582673_v1 2025-07-08T12:01:25.217425Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037905][topic2] pipe [3:7524679991361325587:2484] disconnected; active server actors: 1 2025-07-08T12:01:25.217436Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037905][topic2] pipe [3:7524679991361325587:2484] client user1 disconnected session user1_3_2_6944381216716582673_v1 2025-07-08T12:01:25.217468Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037902] Destroy direct read session user1_3_2_6944381216716582673_v1 2025-07-08T12:01:25.217473Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037902] server disconnected, pipe [3:7524679991361325598:2490] destroyed 2025-07-08T12:01:25.217477Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037902] Destroy direct read session user1_3_2_6944381216716582673_v1 2025-07-08T12:01:25.217479Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037902] server disconnected, pipe [3:7524679991361325597:2489] destroyed 2025-07-08T12:01:25.217497Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: user1_3_2_6944381216716582673_v1 2025-07-08T12:01:25.217499Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: user1_3_2_6944381216716582673_v1 >> KqpEffects::DeletePkPrefixWithIndex [GOOD] >> KqpEffects::AlterDuringUpsertTransaction+UseSink >> KqpImmediateEffects::UpsertAfterInsert [GOOD] >> KqpImmediateEffects::UpsertAfterInsertWithIndex ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyRW1WRR2 [GOOD] Test command err: Trying to start YDB, gRPC: 65075, MsgBus: 9864 2025-07-08T12:01:23.618498Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679980753765828:2071];send_to=[0:7307199536658146131:7762515]; 
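The KqpImmediateEffects::ConflictingKey* output that follows exercises two interactive transactions touching the same key; the "Error while locks merge" QueryResponse later in this block is the expected outcome when a concurrent commit invalidates the first transaction's optimistic locks. A hedged YQL sketch of the pattern, reusing the hypothetical table from the earlier sketch and marking the two sessions in comments (the real tests drive this through the C++ harness):

    -- Session A, interactive transaction (not yet committed):
    SELECT Value FROM `/Root/TestTable` WHERE Key = 1u;   -- acquires a read lock on Key = 1
    -- Session B commits a write to the same key in its own transaction:
    UPSERT INTO `/Root/TestTable` (Key, Value) VALUES (1u, "b");
    -- Session A now writes and commits; its lock on Key = 1 is stale, so the commit
    -- is rejected with a lock-conflict error such as the one reported in this log.
    UPSERT INTO `/Root/TestTable` (Key, Value) VALUES (1u, "a");
    COMMIT;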
2025-07-08T12:01:23.620229Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b79/r3tmp/tmpHOJlKG/pdisk_1.dat 2025-07-08T12:01:23.680742Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65075, node 1 2025-07-08T12:01:23.753084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:23.753116Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:23.756830Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:23.756846Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:23.756848Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:23.756906Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:01:23.757373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9864 TClient is connected to server localhost:9864 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T12:01:23.814512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:23.821995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:23.893214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:23.912392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:23.924194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:24.124186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.134679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.146704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.160447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.217123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.233496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.246422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.433535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 3821, MsgBus: 13573 2025-07-08T12:01:24.744806Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679986322154537:2135];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:24.744967Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b79/r3tmp/tmpYRMk2Y/pdisk_1.dat 2025-07-08T12:01:24.764170Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3821, node 2 2025-07-08T12:01:24.793532Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:24.793547Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:24.793549Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:24.793593Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13573 TClient is connected to server localhost:13573 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-07-08T12:01:24.853311Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:24.853338Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T12:01:24.860766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.861398Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:24.863549Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.879226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:24.904623Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:24.918049Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:25.099518Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.112859Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.126485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.141231Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.159955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.173238Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.182581Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.376746Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.515876Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzY5MzY1NWEtZDNjM2FiYjItZDRhNGY2ZC04OTc2MTA0ZA==, ActorId: [2:7524679990617124213:2448], ActorState: ExecuteState, TraceId: 01jzmyk1zx4nrvq26r0jtfpb3c, Create QueryResponse for error on request, msg: Error while locks merge >> KqpEffects::EmptyUpdate+UseSink [GOOD] >> KqpEffects::EmptyUpdate-UseSink >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleShardTable >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd-UseSink [GOOD] >> KqpImmediateEffects::InsertDuplicates-UseSink [GOOD] >> KqpImmediateEffects::InsertExistingKey+UseSink >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleView |69.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |69.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |69.5%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal >> KqpInplaceUpdate::SingleRowPgNotNull-UseSink [GOOD] >> TExportToS3WithRebootsTests::ShouldSucceedAutoDropping >> TExportToS3WithRebootsTests::ShouldSucceedOnViewsAndTablesPermissions >> TExportToS3WithRebootsTests::CancelShouldSucceedOnViewsAndTables >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleTable [GOOD] >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleShardTableWithChangefeed >> TExportToS3WithRebootsTests::ShouldDisableAutoDropping >> KqpImmediateEffects::ConflictingKeyW1WRR2 [GOOD] >> TExportToS3WithRebootsTests::ShouldSucceedOnViewsAndTables >> DataShardSnapshots::VolatileSnapshotRenameTimeout [GOOD] >> 
TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleView >> DataShardSnapshots::UncommittedWriteRestartDuringCommit >> KqpWrite::CastValues [GOOD] >> KqpImmediateEffects::Interactive [GOOD] >> TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleTable [GOOD] >> TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleView >> TExportToS3WithRebootsTests::CancelShouldSucceedOnManyTables ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 22964, MsgBus: 26694 2025-07-08T12:01:24.227678Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679984904776227:2073];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:24.227800Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b78/r3tmp/tmpMOA6jl/pdisk_1.dat 2025-07-08T12:01:24.283286Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22964, node 1 2025-07-08T12:01:24.321135Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:24.321150Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:24.321152Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:24.321191Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:01:24.334935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:24.334966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:26694 2025-07-08T12:01:24.337112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26694 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-07-08T12:01:24.377236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.388223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:24.407538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:24.438029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:24.449680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:24.634526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.643862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.657860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.672777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.685641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.700182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.714206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.860530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7152, MsgBus: 3804 2025-07-08T12:01:25.217360Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679989854930911:2201];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:25.218249Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b78/r3tmp/tmpm1Hr9a/pdisk_1.dat 
2025-07-08T12:01:25.243417Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7152, node 2 2025-07-08T12:01:25.281628Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:25.281637Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:25.281639Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:25.281684Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3804 2025-07-08T12:01:25.314910Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:25.314938Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:25.321338Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3804 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T12:01:25.335480Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.337426Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:25.345696Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.365703Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:25.390064Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:25.407868Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:25.607955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.618348Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.630804Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.657722Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.668571Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.726263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.740897Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.920934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.210779Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted [GOOD] >> TExportToS3WithRebootsTests::ShouldSucceedOnMultiShardTable >> KqpEffects::EmptyUpdate-UseSink [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowPgNotNull-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 29955, MsgBus: 16116 2025-07-08T12:01:24.696029Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679984926064761:2243];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:24.697637Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b74/r3tmp/tmpgdQt4o/pdisk_1.dat 2025-07-08T12:01:24.761097Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29955, node 1 2025-07-08T12:01:24.783214Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:24.783225Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:24.783227Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:24.783264Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:01:24.804508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-07-08T12:01:24.804553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:24.804943Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16116 TClient is connected to server localhost:16116 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:24.851051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:24.854438Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:24.874166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:24.937757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:24.963559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:24.977474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:25.125014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.139460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.148199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.161161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.178439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.189439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.204965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.343056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 62992, MsgBus: 22603 2025-07-08T12:01:25.603223Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679989740434341:2070];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:25.603245Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b74/r3tmp/tmp0MUrQ3/pdisk_1.dat 2025-07-08T12:01:25.629180Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62992, node 2 2025-07-08T12:01:25.641693Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:25.641705Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:25.641707Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:25.641750Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22603 TClient is connected to server localhost:22603 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:25.703527Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:25.703559Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:25.705738Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:25.709522Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:25.713380Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:25.742143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:25.774216Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:25.805470Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:25.822457Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:25.961552Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.983788Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.994339Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.009707Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.023148Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.037750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.051262Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.221394Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpWrite::CastValues [GOOD] Test command err: Trying to start YDB, gRPC: 32126, MsgBus: 19453 2025-07-08T12:01:24.948498Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679984265195086:2074];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:24.949514Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b6f/r3tmp/tmpewqNzY/pdisk_1.dat 2025-07-08T12:01:25.005159Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32126, node 1 2025-07-08T12:01:25.034508Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:25.034520Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:25.034522Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:25.034564Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:01:25.048547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:25.048575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:25.049744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19453 TClient is connected to server 
localhost:19453 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:25.102905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:25.112474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:25.148341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:25.177634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:25.194072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:25.295422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.351885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.365880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.379588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.434098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.441649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.455452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.595766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 23822, MsgBus: 20903 2025-07-08T12:01:25.896082Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679990981800131:2071];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:25.897779Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b6f/r3tmp/tmpl6Eruw/pdisk_1.dat 2025-07-08T12:01:25.905937Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23822, node 2 2025-07-08T12:01:25.915819Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:25.915833Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:25.915835Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:25.915869Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20903 TClient is connected to server localhost:20903 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T12:01:26.001219Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:26.001244Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:26.001511Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.005250Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:26.005515Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:26.013809Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.022754Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:26.046037Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:26.056786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:26.200465Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.208684Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.218825Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.232398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.287865Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.343454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.354219Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 >> KqpEffects::AlterDuringUpsertTransaction+UseSink [GOOD] >> KqpEffects::AlterDuringUpsertTransaction-UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyW1WRR2 [GOOD] Test command err: Trying to start YDB, gRPC: 21387, MsgBus: 22920 2025-07-08T12:01:24.461702Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679986377652432:2148];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b77/r3tmp/tmpH5FMp8/pdisk_1.dat 2025-07-08T12:01:24.505240Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:01:24.522432Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21387, node 1 2025-07-08T12:01:24.546739Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:24.546751Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:24.546753Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:24.546792Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22920 TClient is connected to server localhost:22920 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-07-08T12:01:24.601003Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:24.601045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:24.601950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:24.608050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:24.610740Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:24.614683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:24.640998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:24.662789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:24.679914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:24.825508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.843354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.855140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.873834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.888311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.961260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.976523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.189290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 62272, MsgBus: 30272 2025-07-08T12:01:25.564160Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679988790260074:2070];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:25.564190Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b77/r3tmp/tmpILvGlA/pdisk_1.dat 2025-07-08T12:01:25.592327Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62272, node 2 2025-07-08T12:01:25.617012Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:25.617026Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:25.617027Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:25.617078Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30272 TClient is connected to server localhost:30272 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-07-08T12:01:25.669172Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:25.669201Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:25.685442Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:25.685830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:25.693531Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:25.703203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:25.714765Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.739633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:25.759022Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:25.908053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.919428Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.937267Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.969864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.005211Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.029048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.050508Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.226362Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.343639Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTNjYjc4ZmEtZjRhZGJlZTEtMzg3YjlhMi05OGZhMjFlZA==, ActorId: [2:7524679993085229799:2448], ActorState: ExecuteState, TraceId: 01jzmyk2sqes049ax1rgcr9fh8, Create QueryResponse for error on request, msg: Error while locks merge >> TOlapReboots::CreateDropStore [GOOD] >> KqpImmediateEffects::InsertExistingKey+UseSink [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::Interactive [GOOD] Test command err: Trying to start YDB, gRPC: 7300, MsgBus: 27242 2025-07-08T12:01:24.680261Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679984602092584:2063];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:24.681225Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b75/r3tmp/tmp3R8CCi/pdisk_1.dat 2025-07-08T12:01:24.749411Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7300, node 1 2025-07-08T12:01:24.784402Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:24.784412Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:24.784413Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:24.784444Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27242 
2025-07-08T12:01:24.821252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:24.821279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:24.822946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27242 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T12:01:24.869298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.872534Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-07-08T12:01:24.885802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:24.949706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-07-08T12:01:24.977706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-07-08T12:01:25.000079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.129444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.141651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.154721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.168814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.182657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.196135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.204197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.357663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.450821Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7524679988897062481:2475], TxId: 281474976710675, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=OTY1NGY5ZDctZWM4Njc5MC00MTkyYmJhZi03ODI3NTQ2OA==. TraceId : 01jzmyk1wt4w2d7ccdcg8mh96r. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-07-08T12:01:25.450910Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7524679988897062482:2476], TxId: 281474976710675, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=OTY1NGY5ZDctZWM4Njc5MC00MTkyYmJhZi03ODI3NTQ2OA==. TraceId : 01jzmyk1wt4w2d7ccdcg8mh96r. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : . }. Handle abort execution event from: [1:7524679988897062478:2446], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-07-08T12:01:25.450981Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTY1NGY5ZDctZWM4Njc5MC00MTkyYmJhZi03ODI3NTQ2OA==, ActorId: [1:7524679988897062328:2446], ActorState: ExecuteState, TraceId: 01jzmyk1wt4w2d7ccdcg8mh96r, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 17994, MsgBus: 22737 2025-07-08T12:01:25.801428Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679989250584870:2073];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:25.802296Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b75/r3tmp/tmpYRowt0/pdisk_1.dat 2025-07-08T12:01:25.827109Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17994, node 2 2025-07-08T12:01:25.852962Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:25.852973Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:25.852975Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:25.853014Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22737 TClient is connected to server localhost:22737 2025-07-08T12:01:25.908598Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:25.908624Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:25.909681Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:25.911804Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:25.913963Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:25.942011Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:25.966403Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.990028Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:26.006638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:26.174388Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.184810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.198978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.214126Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.233374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.257008Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.268910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.432889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted [GOOD] Test command err: Trying to start YDB, gRPC: 22159, MsgBus: 23710 2025-07-08T12:01:25.233277Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679990519869424:2141];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:25.233997Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b6d/r3tmp/tmpyIUIIa/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22159, node 1 2025-07-08T12:01:25.313199Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:25.329428Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:25.329439Z node 1 
:NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:25.329441Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:25.329482Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23710 TClient is connected to server localhost:23710 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-07-08T12:01:25.377712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:25.377733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:25.378780Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:25.387057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:25.399532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:25.464055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:25.486361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.496774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:25.672145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.678831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.686725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.706187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.716295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.729238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.745867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.884871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 23440, MsgBus: 22221 2025-07-08T12:01:26.142069Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679995116232167:2140];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:26.143898Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b6d/r3tmp/tmpcfiFs7/pdisk_1.dat 2025-07-08T12:01:26.158997Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23440, node 2 2025-07-08T12:01:26.172406Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:26.172419Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:26.172423Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:26.172478Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22221 TClient is connected to server localhost:22221 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:26.246578Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:26.246601Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:26.246825Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.247833Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:26.252203Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:26.253955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:26.265988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:26.287548Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:26.299548Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.448734Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.455844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.473091Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.528047Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.542525Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.555094Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.569035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.721421Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 >> TExportToS3WithRebootsTests::ShouldSucceedOnManyTables >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleShardTableWithChangefeed >> KqpImmediateEffects::UpsertAfterInsertWithIndex [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpEffects::EmptyUpdate-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 7111, MsgBus: 8774 2025-07-08T12:01:24.972408Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679985611157336:2145];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:25.033306Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b72/r3tmp/tmpMw2GQI/pdisk_1.dat 2025-07-08T12:01:25.073956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:25.073977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:25.079824Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:25.080206Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7111, node 1 2025-07-08T12:01:25.099025Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2025-07-08T12:01:25.099036Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:25.099038Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:25.099080Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8774 TClient is connected to server localhost:8774 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:25.151288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:25.154521Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-07-08T12:01:25.200938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:25.220046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:25.245008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:25.261971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.360266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.369350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.380185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.391864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.446665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.454937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.469476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.599452Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=3; 2025-07-08T12:01:25.600914Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-07-08T12:01:25.600975Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-07-08T12:01:25.601038Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7524679989906127017:2455], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [1:7524679989906126995:2455]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[1:7524679989906127017:2455].{
: Error: Conflict with existing key., code: 2012 } 2025-07-08T12:01:25.601123Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7524679989906127010:2455], SessionActorId: [1:7524679989906126995:2455], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:7524679989906126995:2455]. isRollback=0 2025-07-08T12:01:25.601182Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmYxNjg2YjEtMmE1MTg1Y2QtOTA5ZmU0OWMtOGI4NTI4Mjc=, ActorId: [1:7524679989906126995:2455], ActorState: ExecuteState, TraceId: 01jzmyk22nfxawkxdcs1rfqz3k, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7524679989906127011:2455] from: [1:7524679989906127010:2455] 2025-07-08T12:01:25.601267Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7524679989906127011:2455] TxId: 281474976710670. Ctx: { TraceId: 01jzmyk22nfxawkxdcs1rfqz3k, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmYxNjg2YjEtMmE1MTg1Y2QtOTA5ZmU0OWMtOGI4NTI4Mjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } 2025-07-08T12:01:25.601316Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmYxNjg2YjEtMmE1MTg1Y2QtOTA5ZmU0OWMtOGI4NTI4Mjc=, ActorId: [1:7524679989906126995:2455], ActorState: ExecuteState, TraceId: 01jzmyk22nfxawkxdcs1rfqz3k, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 62153, MsgBus: 6571 2025-07-08T12:01:25.896522Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679989793615861:2166];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:25.897769Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b72/r3tmp/tmpTPJOB5/pdisk_1.dat 2025-07-08T12:01:25.905239Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62153, node 2 2025-07-08T12:01:25.915327Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:25.915341Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:25.915343Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:25.915380Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6571 TClient is connected to server localhost:6571 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:25.996010Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:25.996031Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:25.997285Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:25.998758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:26.193319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.196317Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [2:7524679994088583626:2304], Recipient [2:7524679994088583632:2290]: NKikimr::TEvTablet::TEvBoot 2025-07-08T12:01:26.196491Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [2:7524679994088583626:2304], Recipient [2:7524679994088583632:2290]: NKikimr::TEvTablet::TEvRestored 2025-07-08T12:01:26.196569Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:7524679994088583632:2290] 2025-07- ... 3 :TX_DATASHARD TRACE: Execution status for [1751976087057:281474976715662] at 72075186224037888 is Executed 2025-07-08T12:01:27.010333Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [1751976087057:281474976715662] at 72075186224037888 executing on unit MoveIndex 2025-07-08T12:01:27.010335Z node 3 :TX_DATASHARD TRACE: Add [1751976087057:281474976715662] at 72075186224037888 to execution unit CreateCdcStream 2025-07-08T12:01:27.010337Z node 3 :TX_DATASHARD TRACE: Trying to execute [1751976087057:281474976715662] at 72075186224037888 on unit CreateCdcStream 2025-07-08T12:01:27.010340Z node 3 :TX_DATASHARD TRACE: Execution status for [1751976087057:281474976715662] at 72075186224037888 is Executed 2025-07-08T12:01:27.010341Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [1751976087057:281474976715662] at 72075186224037888 executing on unit CreateCdcStream 2025-07-08T12:01:27.010343Z node 3 :TX_DATASHARD TRACE: Add [1751976087057:281474976715662] at 72075186224037888 to execution unit AlterCdcStream 2025-07-08T12:01:27.010344Z node 3 :TX_DATASHARD TRACE: Trying to execute [1751976087057:281474976715662] at 72075186224037888 on unit AlterCdcStream 2025-07-08T12:01:27.010348Z node 3 :TX_DATASHARD TRACE: Execution status for [1751976087057:281474976715662] at 72075186224037888 is Executed 2025-07-08T12:01:27.010351Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [1751976087057:281474976715662] at 72075186224037888 executing on unit AlterCdcStream 2025-07-08T12:01:27.010352Z node 3 :TX_DATASHARD TRACE: Add [1751976087057:281474976715662] at 72075186224037888 to execution unit DropCdcStream 2025-07-08T12:01:27.010354Z node 3 :TX_DATASHARD TRACE: Trying to execute [1751976087057:281474976715662] at 72075186224037888 on unit DropCdcStream 2025-07-08T12:01:27.010357Z node 3 :TX_DATASHARD TRACE: Execution status for [1751976087057:281474976715662] at 72075186224037888 is Executed 2025-07-08T12:01:27.010359Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [1751976087057:281474976715662] at 72075186224037888 executing on unit DropCdcStream 2025-07-08T12:01:27.010360Z node 3 :TX_DATASHARD TRACE: Add [1751976087057:281474976715662] at 72075186224037888 to execution unit CreateIncrementalRestoreSrc 2025-07-08T12:01:27.010362Z node 3 :TX_DATASHARD TRACE: Trying to execute [1751976087057:281474976715662] at 72075186224037888 on unit CreateIncrementalRestoreSrc 2025-07-08T12:01:27.010365Z node 3 :TX_DATASHARD TRACE: Execution status for [1751976087057:281474976715662] at 72075186224037888 is Executed 2025-07-08T12:01:27.010367Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [1751976087057:281474976715662] at 72075186224037888 executing on unit CreateIncrementalRestoreSrc 2025-07-08T12:01:27.010369Z 
node 3 :TX_DATASHARD TRACE: Add [1751976087057:281474976715662] at 72075186224037888 to execution unit CompleteOperation 2025-07-08T12:01:27.010371Z node 3 :TX_DATASHARD TRACE: Trying to execute [1751976087057:281474976715662] at 72075186224037888 on unit CompleteOperation 2025-07-08T12:01:27.010412Z node 3 :TX_DATASHARD TRACE: Execution status for [1751976087057:281474976715662] at 72075186224037888 is DelayComplete 2025-07-08T12:01:27.010414Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [1751976087057:281474976715662] at 72075186224037888 executing on unit CompleteOperation 2025-07-08T12:01:27.010417Z node 3 :TX_DATASHARD TRACE: Add [1751976087057:281474976715662] at 72075186224037888 to execution unit CompletedOperations 2025-07-08T12:01:27.010418Z node 3 :TX_DATASHARD TRACE: Trying to execute [1751976087057:281474976715662] at 72075186224037888 on unit CompletedOperations 2025-07-08T12:01:27.010422Z node 3 :TX_DATASHARD TRACE: Execution status for [1751976087057:281474976715662] at 72075186224037888 is Executed 2025-07-08T12:01:27.010424Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [1751976087057:281474976715662] at 72075186224037888 executing on unit CompletedOperations 2025-07-08T12:01:27.010426Z node 3 :TX_DATASHARD TRACE: Execution plan for [1751976087057:281474976715662] at 72075186224037888 has finished 2025-07-08T12:01:27.010428Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:01:27.010429Z node 3 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-07-08T12:01:27.010431Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-07-08T12:01:27.010432Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-07-08T12:01:27.010645Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1751976087057} 2025-07-08T12:01:27.010662Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:01:27.010753Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [3:7524679994154736783:2319], Recipient [3:7524679994154736837:2296]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-07-08T12:01:27.010756Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-07-08T12:01:27.010804Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:01:27.010806Z node 3 :TX_DATASHARD TRACE: Complete execution for [1751976087057:281474976715662] at 72075186224037888 on unit DropTable 2025-07-08T12:01:27.010808Z node 3 :TX_DATASHARD TRACE: Complete execution for [1751976087057:281474976715662] at 72075186224037888 on unit CompleteOperation 2025-07-08T12:01:27.010820Z node 3 :TX_DATASHARD DEBUG: Complete [1751976087057 : 281474976715662] from 72075186224037888 at tablet 72075186224037888 send result to client [3:7524679994154736458:2150], exec latency: 0 ms, propose latency: 0 ms 2025-07-08T12:01:27.010829Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715662 state PreOffline TxInFly 0 2025-07-08T12:01:27.010838Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:01:27.010851Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender 
[3:7524679994154736755:2304], Recipient [3:7524679994154736764:2290]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-07-08T12:01:27.011154Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [3:7524679998449704244:2312], Recipient [3:7524679994154736764:2290]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046644480 Status: OK ServerId: [3:7524679998449704246:2436] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-07-08T12:01:27.011159Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-07-08T12:01:27.011644Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [3:7524679994154736458:2150], Recipient [3:7524679994154736764:2290]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715662 2025-07-08T12:01:27.011648Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-07-08T12:01:27.011653Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037888 state PreOffline 2025-07-08T12:01:27.011661Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-07-08T12:01:27.012917Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-07-08T12:01:27.012935Z node 3 :TX_DATASHARD INFO: 72075186224037888 Initiating switch from PreOffline to Offline state 2025-07-08T12:01:27.013292Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [3:7524679998449704244:2312], Recipient [3:7524679994154736764:2290]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046644480 ClientId: [3:7524679998449704244:2312] ServerId: [3:7524679998449704246:2436] } 2025-07-08T12:01:27.013298Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-07-08T12:01:27.013864Z node 3 :TX_DATASHARD INFO: 72075186224037888 Reporting state Offline to schemeshard 72057594046644480 2025-07-08T12:01:27.013901Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:7524679994154736755:2304], Recipient [3:7524679994154736764:2290]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-07-08T12:01:27.014014Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [3:7524679998449704251:2313], Recipient [3:7524679994154736764:2290]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046644480 Status: OK ServerId: [3:7524679998449704252:2441] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-07-08T12:01:27.014021Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-07-08T12:01:27.014211Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269552133, Sender [3:7524679994154736458:2150], Recipient [3:7524679994154736764:2290]: NKikimrTxDataShard.TEvStateChangedResult TabletId: 72057594046644480 State: 4 2025-07-08T12:01:27.014213Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvStateChangedResult 2025-07-08T12:01:27.014216Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2025-07-08T12:01:27.014230Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [3:7524679998449704251:2313], Recipient [3:7524679994154736764:2290]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 
72057594046644480 ClientId: [3:7524679998449704251:2313] ServerId: [3:7524679998449704252:2441] } 2025-07-08T12:01:27.014232Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-07-08T12:01:27.015334Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268829699, Sender [3:7524679994154736755:2304], Recipient [3:7524679994154736764:2290]: NKikimrTabletBase.TEvTabletStop TabletID: 72075186224037888 Reason: ReasonStop 2025-07-08T12:01:27.015349Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-07-08T12:01:27.015362Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [3:7524679994154736910:2402], Recipient [3:7524679994154736764:2290]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-07-08T12:01:27.015365Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-07-08T12:01:27.015370Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:7524679994154736909:2401], serverId# [3:7524679994154736910:2402], sessionId# [0:0:0] 2025-07-08T12:01:27.015535Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-07-08T12:01:27.015547Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268829696, Sender [3:7524679994154736755:2304], Recipient [3:7524679994154736764:2290]: NKikimr::TEvTablet::TEvTabletDead 2025-07-08T12:01:27.015626Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-07-08T12:01:27.015649Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-07-08T12:01:27.015702Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 271843840, Sender [3:7524679994154736106:2064], Recipient [3:7524679994154736837:2296]: NKikimr::TEvPipeCache::TEvDeliveryProblem 2025-07-08T12:01:27.015705Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPipeCache::TEvDeliveryProblem 2025-07-08T12:01:27.015708Z node 3 :TX_DATASHARD DEBUG: Client pipe to tablet 72075186224037888 from 72075186224037889 is reset >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleShardTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_serverless_reboots/unittest >> TSchemeShardServerLessReboots::TestServerlessComputeResourcesModeWithReboots [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:127:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:132:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:137:2058] recipient: [1:111:2143] 2025-07-08T11:59:39.705465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 
172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:59:39.705490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:39.705495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:59:39.705500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:59:39.705505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:59:39.705509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:59:39.705520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:39.705533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:59:39.705621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:39.726112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T11:59:39.726140Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:177:2058] recipient: [1:15:2062] 2025-07-08T11:59:39.733812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:39.733868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:59:39.733899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:59:39.735452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:59:39.735621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:59:39.735755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:39.735807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:59:39.736264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:39.736305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:59:39.736555Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:39.736564Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:39.736582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:59:39.736590Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:39.736596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:59:39.736633Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 
72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-07-08T11:59:39.738006Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T11:59:39.765783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:39.765875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:39.765944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:59:39.765985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:59:39.765996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:39.766824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:39.766853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:59:39.766902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:39.766911Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:59:39.766916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:59:39.766921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:59:39.767247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:39.767256Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:39.767260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:59:39.767517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:39.767525Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:39.767531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:39.767537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:59:39.767990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:59:39.774384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:59:39.774450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:59:39.774677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:39.774717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:39.774726Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:39.774804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:59:39.774812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:39.774848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:59:39.774861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:59:39.775431Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:39.775441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:39.775491Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:39.775496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:59:39.775571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:39.775577Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:59:39.775589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:39.775594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:39.775598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:39.775601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2025-07-08T11:59:39.775604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:59:39.775609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TO ... DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-07-08T12:01:25.375612Z node 213 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 4], Generation: 2, ActorId:[213:640:2568], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-07-08T12:01:25.375742Z node 213 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409549 2025-07-08T12:01:25.375747Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409549, txId: 0, path id: [OwnerId: 72075186234409549, LocalPathId: 1] 2025-07-08T12:01:25.375769Z node 213 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409549 2025-07-08T12:01:25.375773Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [213:733:2635], at schemeshard: 72075186234409549, txId: 0, path id: 1 2025-07-08T12:01:25.375920Z node 213 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186234409549, msg: Owner: 72075186234409549 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186234409549, cookie: 0 2025-07-08T12:01:25.375982Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1007:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:4 msg type: 268697640 2025-07-08T12:01:25.376000Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1007, partId: 0, tablet: 72075186233409546 2025-07-08T12:01:25.376035Z node 213 :HIVE INFO: [72075186233409546] TEvUpdateDomain, msg: DomainKey { SchemeShard: 72057594046678944 PathId: 4 } ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared TxId: 1007 2025-07-08T12:01:25.376048Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: Update domain reply, message: Origin: 72075186233409546 TxId: 1007, at schemeshard: 72057594046678944 2025-07-08T12:01:25.376052Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1007, tablet: 72075186233409546, partId: 0 2025-07-08T12:01:25.376065Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1007:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1007 2025-07-08T12:01:25.376072Z node 213 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 1007:0, HandleReply TEvUpdateDomainReply, from hive: 72075186233409546 2025-07-08T12:01:25.376077Z node 213 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1007:0 138 -> 240 2025-07-08T12:01:25.376139Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1007 2025-07-08T12:01:25.376147Z node 213 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-07-08T12:01:25.376464Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1007:0, at schemeshard: 72057594046678944 2025-07-08T12:01:25.376496Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1007:0, at schemeshard: 72057594046678944 2025-07-08T12:01:25.376501Z node 213 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1007:0 ProgressState 2025-07-08T12:01:25.376513Z node 213 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1007:0 progress is 1/1 2025-07-08T12:01:25.376516Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1007 ready parts: 1/1 2025-07-08T12:01:25.376520Z node 213 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1007:0 progress is 1/1 2025-07-08T12:01:25.376523Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1007 ready parts: 1/1 2025-07-08T12:01:25.376527Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1007, ready parts: 1/1, is published: true 2025-07-08T12:01:25.376533Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1007 ready parts: 1/1 2025-07-08T12:01:25.376537Z node 213 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1007:0 2025-07-08T12:01:25.376541Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1007:0 2025-07-08T12:01:25.376552Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 5 TestModificationResult got TxId: 1007, wait until txId: 1007 TestWaitNotification wait txId: 1007 2025-07-08T12:01:25.376963Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1007: send EvNotifyTxCompletion 2025-07-08T12:01:25.376974Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1007 2025-07-08T12:01:25.377057Z node 213 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1007, at schemeshard: 72057594046678944 2025-07-08T12:01:25.377075Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1007: got EvNotifyTxCompletionResult 2025-07-08T12:01:25.377079Z node 213 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1007: satisfy waiter [213:869:2751] TestWaitNotification: OK eventTxId 1007 2025-07-08T12:01:25.377158Z node 213 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:01:25.377184Z node 213 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 35us result status StatusSuccess 2025-07-08T12:01:25.377257Z node 213 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 
SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 4 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:25.377323Z node 213 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:01:25.377342Z node 213 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 21us result status StatusSuccess 2025-07-08T12:01:25.377378Z node 213 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 1004 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 4 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:25.377446Z node 213 :HIVE INFO: [72075186233409546] TEvRequestDomainInfo, 72057594046678944:4 2025-07-08T12:01:25.377495Z node 213 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409549 2025-07-08T12:01:25.377513Z node 213 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186234409549 describe path "/MyRoot/ServerLess0" took 20us result status StatusSuccess 2025-07-08T12:01:25.377552Z node 213 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "MyRoot/ServerLess0" PathId: 1 SchemeshardId: 72075186234409549 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 4 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 4 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/ServerLess0" } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 1 PathOwnerId: 72075186234409549, at schemeshard: 72075186234409549 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InsertExistingKey+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 5273, MsgBus: 18506 2025-07-08T12:01:25.333682Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679989831377728:2071];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:25.335281Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b6c/r3tmp/tmpkZw88i/pdisk_1.dat 2025-07-08T12:01:25.457789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:25.457817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:25.461395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:25.463784Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5273, node 1 2025-07-08T12:01:25.505133Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:25.505146Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:25.505148Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:25.505188Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18506 TClient is connected to server localhost:18506 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:25.587430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:25.589719Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:25.599639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:25.630202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.662052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:25.691298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:25.800812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.814738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.837700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.855977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.870676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.883499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.904549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.119819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.223722Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7524679994126347629:2475], TxId: 281474976715675, task: 1. Ctx: { TraceId : 01jzmyk2mx28zny9kkg1staans. SessionId : ydb://session/3?node_id=1&id=N2NmODhhZDMtZmNiOGRhODctMTcyOWIwN2YtOTkzNjlhZGU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : . }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-07-08T12:01:26.223820Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7524679994126347631:2476], TxId: 281474976715675, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=N2NmODhhZDMtZmNiOGRhODctMTcyOWIwN2YtOTkzNjlhZGU=. TraceId : 01jzmyk2mx28zny9kkg1staans. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. Handle abort execution event from: [1:7524679994126347626:2446], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-07-08T12:01:26.223881Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2NmODhhZDMtZmNiOGRhODctMTcyOWIwN2YtOTkzNjlhZGU=, ActorId: [1:7524679994126347460:2446], ActorState: ExecuteState, TraceId: 01jzmyk2mx28zny9kkg1staans, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 62703, MsgBus: 2304 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b6c/r3tmp/tmpo9LSvr/pdisk_1.dat 2025-07-08T12:01:26.528873Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:01:26.538405Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62703, node 2 2025-07-08T12:01:26.553145Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:26.553156Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:26.553158Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:26.553208Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2304 TClient is connected to server localhost:2304 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:26.629265Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:26.629290Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:26.629614Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.630728Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:26.633328Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:26.696943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-07-08T12:01:26.708374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:26.758930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.771556Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:26.865893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.875755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.884878Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.946163Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:27.008778Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:27.016608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:27.031978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:27.151533Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:27.205964Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=4; 2025-07-08T12:01:27.206041Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 4 at tablet 72075186224037922 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-07-08T12:01:27.206069Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 4 at tablet 72075186224037922 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-07-08T12:01:27.206147Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7524679998216162896:2455], Table: `/Root/TestImmediateEffects` ([72057594046644480:13:1]), SessionActorId: [2:7524679998216162753:2455]Got CONSTRAINT VIOLATION for table `/Root/TestImmediateEffects`. ShardID=72075186224037922, Sink=[2:7524679998216162896:2455].{
: Error: Conflict with existing key., code: 2012 } 2025-07-08T12:01:27.206168Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7524679998216162880:2455], SessionActorId: [2:7524679998216162753:2455], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[2:7524679998216162753:2455]. isRollback=0 2025-07-08T12:01:27.206219Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjQ4NWZjNTUtMmVlNGZkOWQtZTRjNjA1Mi05MTA0MTQzNw==, ActorId: [2:7524679998216162753:2455], ActorState: ExecuteState, TraceId: 01jzmyk3m68hdhcbkefjj5rzhj, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7524679998216162890:2455] from: [2:7524679998216162880:2455] 2025-07-08T12:01:27.206375Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7524679998216162890:2455] TxId: 281474976715673. Ctx: { TraceId: 01jzmyk3m68hdhcbkefjj5rzhj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjQ4NWZjNTUtMmVlNGZkOWQtZTRjNjA1Mi05MTA0MTQzNw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } 2025-07-08T12:01:27.206424Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjQ4NWZjNTUtMmVlNGZkOWQtZTRjNjA1Mi05MTA0MTQzNw==, ActorId: [2:7524679998216162753:2455], ActorState: ExecuteState, TraceId: 01jzmyk3m68hdhcbkefjj5rzhj, Create QueryResponse for error on request, msg: >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnViewsAndTables >> KqpEffects::AlterDuringUpsertTransaction-UseSink [GOOD] >> TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleShardTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertAfterInsertWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 1537, MsgBus: 14047 2025-07-08T12:01:24.987233Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679987436290530:2061];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:24.987515Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b70/r3tmp/tmpTPtZuY/pdisk_1.dat 2025-07-08T12:01:25.058375Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1537, node 1 2025-07-08T12:01:25.084725Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:25.084738Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:25.084739Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:25.084770Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14047 2025-07-08T12:01:25.121440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:25.121465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:25.122651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14047 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:25.145706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:25.185466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:25.221641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.242316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:25.255890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:25.437140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.457885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.469175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.482781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.495600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.552628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.572999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.740938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.979629Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Trying to start YDB, gRPC: 11370, MsgBus: 9072 2025-07-08T12:01:26.192078Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679994420625676:2230];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:26.192194Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b70/r3tmp/tmpo4yKBm/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11370, node 2 2025-07-08T12:01:26.222207Z node 2 
:IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:26.223216Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:26.223227Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:26.223228Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:26.223261Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9072 TClient is connected to server localhost:9072 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:26.283169Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:26.283199Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:26.284511Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:26.297555Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:26.301356Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:26.416971Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:26.442302Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:26.481771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.514041Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:26.557746Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.567834Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.622676Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.631629Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.646473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.661164Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.674972Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.851272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.868372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.885252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-07-08T12:01:27.189095Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KqpInplaceUpdate::SingleRowArithm-UseSink >> KqpImmediateEffects::InsertDuplicates+UseSink >> YdbTableSplit::SplitByLoadWithDeletes [GOOD] >> KqpImmediateEffects::InteractiveTxWithWriteAtTheEnd >> KqpEffects::AlterAfterUpsertBeforeUpsertTransaction+UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpEffects::AlterDuringUpsertTransaction-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 31259, MsgBus: 6266 2025-07-08T12:01:25.244851Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679990906089868:2066];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:25.244877Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b6e/r3tmp/tmp7Z9DsP/pdisk_1.dat 2025-07-08T12:01:25.305297Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31259, node 1 2025-07-08T12:01:25.319742Z 
node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:25.319755Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:25.319757Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:25.319798Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6266 2025-07-08T12:01:25.348412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:25.348432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:25.349495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6266 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:25.374099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:25.378388Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:25.382188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:25.418843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:25.444153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:25.481598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:25.656186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.712460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.722929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.738500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.750964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.768777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.781622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:25.932272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 19804, MsgBus: 4992 2025-07-08T12:01:26.258567Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679994654395611:2200];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:26.259019Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b6e/r3tmp/tmpSPKRCh/pdisk_1.dat 2025-07-08T12:01:26.267448Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19804, node 2 2025-07-08T12:01:26.278132Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:26.278144Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:26.278146Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:26.278184Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4992 TClient is connected to server localhost:4992 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:26.360874Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:26.360901Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:26.361328Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.361786Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:26.362987Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:26.369113Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:26.381808Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:26.400860Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:26.414618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:26.601928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.609395Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.666370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.675210Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.687959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.704247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.716601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.865165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.906893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-07-08T12:01:26.914759Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_SCHEME_CHANGED;details=Cannot parse tx 3. SCHEME_CHANGED: Table '/Root/TestTable' scheme changed. at tablet# 72075186224037922;tx_id=3; 2025-07-08T12:01:26.916344Z node 2 :TX_DATASHARD ERROR: Cannot parse tx 3. SCHEME_CHANGED: Table '/Root/TestTable' scheme changed. 2025-07-08T12:01:26.916425Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7524679994654398074:2455], Table: `/Root/TestTable` ([72057594046644480:13:1]), SessionActorId: [2:7524679994654397920:2455]Got SCHEME CHANGED for table `/Root/TestTable`. ShardID=72075186224037922, Sink=[2:7524679994654398074:2455].{
: Error: Cannot parse tx 3. SCHEME_CHANGED: Table '/Root/TestTable' scheme changed. at tablet# 72075186224037922, code: 2034 } 2025-07-08T12:01:26.916575Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7524679994654398067:2455], SessionActorId: [2:7524679994654397920:2455], statusCode=SCHEME_ERROR. Issue=
: Error: Scheme changed. Table: `/Root/TestTable`., code: 2028
: Error: Cannot parse tx 3. SCHEME_CHANGED: Table '/Root/TestTable' scheme changed. at tablet# 72075186224037922, code: 2034 . sessionActorId=[2:7524679994654397920:2455]. isRollback=0 2025-07-08T12:01:26.916658Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTFlNjUwYjgtOTFlYmIxNzItNzkyOWJkZTItM2FmY2ZlNjA=, ActorId: [2:7524679994654397920:2455], ActorState: ExecuteState, TraceId: 01jzmyk3c115rfqax546vfz0t2, got TEvKqpBuffer::TEvError in ExecuteState, status: SCHEME_ERROR send to: [2:7524679994654398068:2455] from: [2:7524679994654398067:2455] 2025-07-08T12:01:26.916775Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7524679994654398068:2455] TxId: 281474976715672. Ctx: { TraceId: 01jzmyk3c115rfqax546vfz0t2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTFlNjUwYjgtOTFlYmIxNzItNzkyOWJkZTItM2FmY2ZlNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. SCHEME_ERROR: {
: Error: Scheme changed. Table: `/Root/TestTable`., code: 2028 subissue: {
: Error: Cannot parse tx 3. SCHEME_CHANGED: Table '/Root/TestTable' scheme changed. at tablet# 72075186224037922, code: 2034 } } 2025-07-08T12:01:26.916837Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTFlNjUwYjgtOTFlYmIxNzItNzkyOWJkZTItM2FmY2ZlNjA=, ActorId: [2:7524679994654397920:2455], ActorState: ExecuteState, TraceId: 01jzmyk3c115rfqax546vfz0t2, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 9945, MsgBus: 13084 2025-07-08T12:01:27.216492Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7524679999829069004:2070];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:27.216514Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b6e/r3tmp/tmpMzV5gZ/pdisk_1.dat 2025-07-08T12:01:27.243150Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9945, node 3 2025-07-08T12:01:27.249453Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:27.249469Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:27.249471Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:27.249513Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13084 TClient is connected to server localhost:13084 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:27.317205Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:27.317239Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:27.318630Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-07-08T12:01:27.325699Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:27.327224Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:27.340322Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:27.361970Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:27.392786Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:27.405332Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:27.554990Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:27.566383Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:27.577150Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:27.590956Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:27.605602Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:27.619636Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:27.637668Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:27.807473Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:27.850005Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-07-08T12:01:27.855611Z node 3 :TX_DATASHARD ERROR: Shard 72075186224037922 cannot parse tx 281474976715672: Table '/Root/TestTable' scheme changed. 
2025-07-08T12:01:27.855667Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7524679999829071560:2446] TxId: 281474976715672. Ctx: { TraceId: 01jzmyk49ec6rgayq43vch6cqs, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZTg5NDNkMDEtOTdkZmVhMmYtNjJmNWJkZDktNDk2ZTM5YWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ERROR: [SCHEME_CHANGED] Table '/Root/TestTable' scheme changed.; 2025-07-08T12:01:27.855735Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTg5NDNkMDEtOTdkZmVhMmYtNjJmNWJkZDktNDk2ZTM5YWU=, ActorId: [3:7524679999829071426:2446], ActorState: ExecuteState, TraceId: 01jzmyk49ec6rgayq43vch6cqs, Create QueryResponse for error on request, msg: >> KqpWrite::InsertRevert >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast+UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_olap_reboots/unittest >> TOlapReboots::CreateDropStore [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:127:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:132:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:137:2058] recipient: [1:111:2143] 2025-07-08T11:59:55.331808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T11:59:55.331828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:55.331833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-07-08T11:59:55.331838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T11:59:55.331844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T11:59:55.331847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T11:59:55.331853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T11:59:55.331867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T11:59:55.331923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T11:59:55.350225Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { 
AllExternalDataSourcesAreAvailable: true } 2025-07-08T11:59:55.350254Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:177:2058] recipient: [1:15:2062] 2025-07-08T11:59:55.353818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T11:59:55.353863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T11:59:55.353900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T11:59:55.355548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T11:59:55.355690Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T11:59:55.355809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:55.355866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T11:59:55.356414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:55.356459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T11:59:55.356689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:55.356699Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:55.356718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T11:59:55.356726Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:55.356732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T11:59:55.356809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-07-08T11:59:55.358108Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T11:59:55.375174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T11:59:55.375237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.375297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T11:59:55.375339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T11:59:55.375350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.376076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:55.376103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T11:59:55.376147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.376157Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T11:59:55.376162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T11:59:55.376167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T11:59:55.376552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.376562Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T11:59:55.376567Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T11:59:55.376918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.376928Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.376933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:55.376940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T11:59:55.377519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T11:59:55.378013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T11:59:55.378053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T11:59:55.378229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T11:59:55.378253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T11:59:55.378260Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:55.378352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T11:59:55.378359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T11:59:55.378386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T11:59:55.378397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T11:59:55.378764Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T11:59:55.378772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T11:59:55.378816Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T11:59:55.378821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T11:59:55.378903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T11:59:55.378910Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T11:59:55.378922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:55.378926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:55.378931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T11:59:55.378934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T11:59:55.378939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T11:59:55.378943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TO ... 
0004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:27.300458Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1003 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 824633722989 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:27.300464Z node 192 :FLAT_TX_SCHEMESHARD INFO: TDropOlapStore TPropose operationId# 1003:0 HandleReply TEvOperationPlan at schemeshard: 72057594046678944, stepId: 5000004 2025-07-08T12:01:27.300487Z node 192 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 128 -> 129 2025-07-08T12:01:27.300508Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:01:27.300517Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: Erasing txId 1003 2025-07-08T12:01:27.300927Z node 192 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:27.300935Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:27.300976Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-07-08T12:01:27.300998Z node 192 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:27.301002Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [192:206:2208], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2025-07-08T12:01:27.301006Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [192:206:2208], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2025-07-08T12:01:27.301098Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-07-08T12:01:27.301106Z node 192 :FLAT_TX_SCHEMESHARD INFO: TDropOlapStore TProposedWaitParts operationId# 1003:0 ProgressState at schemeshard: 72057594046678944 2025-07-08T12:01:27.301113Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: TDropOlapStore TProposedWaitParts operationId# 1003:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-07-08T12:01:27.301189Z node 192 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-07-08T12:01:27.301200Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-07-08T12:01:27.301204Z node 192 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-07-08T12:01:27.301208Z node 192 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-07-08T12:01:27.301213Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove 
publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-07-08T12:01:27.301277Z node 192 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2025-07-08T12:01:27.301285Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 1003 2025-07-08T12:01:27.301288Z node 192 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-07-08T12:01:27.301291Z node 192 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-07-08T12:01:27.301295Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T12:01:27.301302Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2025-07-08T12:01:27.301879Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1003:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-07-08T12:01:27.301903Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1003, partId: 0, tablet: 72075186233409546 2025-07-08T12:01:27.302300Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1003 2025-07-08T12:01:27.302312Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-07-08T12:01:27.302324Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 1003 2025-07-08T12:01:27.302332Z node 192 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 130 2025-07-08T12:01:27.302600Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-07-08T12:01:27.302629Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-07-08T12:01:27.302871Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-07-08T12:01:27.302896Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-07-08T12:01:27.302901Z node 192 :FLAT_TX_SCHEMESHARD INFO: TDropOlapStore TProposedDeleteParts operationId# 1003:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:01:27.302915Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-07-08T12:01:27.302950Z node 192 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-07-08T12:01:27.302954Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-07-08T12:01:27.302957Z node 192 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-07-08T12:01:27.302959Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 1003 ready parts: 1/1 2025-07-08T12:01:27.302961Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-07-08T12:01:27.302964Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-07-08T12:01:27.302967Z node 192 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2025-07-08T12:01:27.302973Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2025-07-08T12:01:27.302990Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-07-08T12:01:27.303294Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-07-08T12:01:27.303358Z node 192 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-07-08T12:01:27.303417Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:27.303546Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-07-08T12:01:27.303694Z node 192 :TX_COLUMNSHARD WARN: tablet_id=72075186233409546;self_id=[192:334:2321];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:863;event=tablet_die; Forgetting tablet 72075186233409546 2025-07-08T12:01:27.304605Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T12:01:27.304617Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-07-08T12:01:27.304632Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:01:27.310157Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-07-08T12:01:27.310179Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-07-08T12:01:27.310288Z node 192 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 1003 2025-07-08T12:01:27.310328Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-07-08T12:01:27.310333Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-07-08T12:01:27.310392Z node 192 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-07-08T12:01:27.310409Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-07-08T12:01:27.310413Z node 192 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [192:453:2424] TestWaitNotification: OK eventTxId 1003 2025-07-08T12:01:27.310464Z node 192 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore" 
Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:01:27.310495Z node 192 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore" took 45us result status StatusPathDoesNotExist 2025-07-08T12:01:27.310526Z node 192 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/OlapStore\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1147" Path: "/MyRoot/OlapStore" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleTable [GOOD] >> DataShardSnapshots::UncommittedWriteRestartDuringCommit [GOOD] >> DataShardSnapshots::UncommittedWriteRestartDuringCommitThenBulkErase >> YdbTableSplit::SplitByLoadWithUpdates [GOOD] >> KqpImmediateEffects::DeleteAfterUpsert >> KqpImmediateEffects::TxWithWriteAtTheEnd-UseSink >> KqpEffects::InsertRevert_Literal_Success >> KqpInplaceUpdate::SingleRowArithm-UseSink [GOOD] >> KqpInplaceUpdate::SingleRowIf+UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithDeletes [GOOD] Test command err: 2025-07-08T12:01:06.480795Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679908721091799:2080];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:06.480821Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000fe1/r3tmp/tmpKc4t07/pdisk_1.dat 2025-07-08T12:01:06.587591Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:06.599920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:06.599948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:06.600979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18218, node 1 2025-07-08T12:01:06.639528Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:06.639541Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:06.639542Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:06.639591Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63595 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:06.702223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:63595 2025-07-08T12:01:07.065603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T12:01:07.119471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1751976067149 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1751976067149 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-07-08T12:01:07.177780Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. 
Ctx: { TraceId: 01jzmyjg2r3apdf2fvkv4m39gm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2YxMmNmZTItOGIzMzQ4OGMtZWMwNzNjZTYtMmZkYzU1N2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.179028Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jzmyjg2d981n1rq3j6rfyc26, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjkzZDEyOTQtNWY0YTQ2MjAtY2YwZjgzODEtNzg4ZjIxM2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.179095Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jzmyjg2daz6jp3rrf8wvrqmj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDNlNGZjZjEtZjBhMDkwNGEtYWFkMGU5MDAtNzRhZGQ4Nzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.179153Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jzmyjg2r35sb1gdgkhc3jd1r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2Q3YzIwODQtNjcxY2Q5NzMtYzIwNjQwYzQtMWE4OWMzNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.179214Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jzmyjg2d20yxta1b2h884rf1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTM3Zjk4NDItMWE5YTY5OWItZDM0ZjJkNmYtZmJhM2JmZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.179258Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jzmyjg2da40npjjvnb65gtdr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2ZhZjMwM2EtNGY2NDgyMmQtNjFjOWE4YTAtZDNmZTk5MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.179308Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jzmyjg2decwdgsxdq6pv1nds, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmE2ODM1NGMtY2VmMTlhZjUtZjY4ZDRjMS1mNGRiNTViOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.179362Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jzmyjg2r3dnkfkhqd27qqn7j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGY2ZDU3ZmUtMTczZGI3YWUtNTg4N2Y5MGEtZTgxZGJjNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.179645Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jzmyjg2ddry39b4ecezrr48y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGNiMzA2Y2UtOTY0NWY2ZjUtZWU3M2JkYzAtNmMyY2Q5YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.181102Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jzmyjg2jd38n1btxxes65qbg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzFiOGVmODgtMzdkZDAxNGItNTE5ZGQ4ZDUtYzIyODZmZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.204323Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jzmyjg42f0tbh1k0cmyz86px, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGY2ZDU3ZmUtMTczZGI3YWUtNTg4N2Y5MGEtZTgxZGJjNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T12:01:07.204442Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jzmyjg43f952bze5neqm5d51, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2Q3YzIwODQtNjcxY2Q5NzMtYzIwNjQwYzQtMWE4OWMzNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.204498Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jzmyjg4331xfh5petjgw00tx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzFiOGVmODgtMzdkZDAxNGItNTE5ZGQ4ZDUtYzIyODZmZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.205696Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jzmyjg44agqrqeg95nwekrqm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTM3Zjk4NDItMWE5YTY5OWItZDM0ZjJkNmYtZmJhM2JmZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.210676Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jzmyjg4906rmds1qt6eaa3ww, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2YxMmNmZTItOGIzMzQ4OGMtZWMwNzNjZTYtMmZkYzU1N2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.210788Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jzmyjg49esz2t2x3y1a4sm11, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2ZhZjMwM2EtNGY2NDgyMmQtNjFjOWE4YTAtZDNmZTk5MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.210842Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jzmyjg49bvrfzdsyprz7t12j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmE2ODM1NGMtY2VmMTlhZjUtZjY4ZDRjMS1mNGRiNTViOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.210893Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jzmyjg493hzks5sm3xctvpc5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGNiMzA2Y2UtOTY0NWY2ZjUtZWU3M2JkYzAtNmMyY2Q5YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.212098Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jzmyjg49dzzh354pwhe2vqh4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjkzZDEyOTQtNWY0YTQ2MjAtY2YwZjgzODEtNzg4ZjIxM2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.212210Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jzmyjg4913p66e7xwck1v32h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDNlNGZjZjEtZjBhMDkwNGEtYWFkMGU5MDAtNzRhZGQ4Nzc=, Curre ... e: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTM3Zjk4NDItMWE5YTY5OWItZDM0ZjJkNmYtZmJhM2JmZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.160186Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976737787. Ctx: { TraceId: 01jzmyk3kp0hjmmmafprj7gqmx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2YxMmNmZTItOGIzMzQ4OGMtZWMwNzNjZTYtMmZkYzU1N2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.160226Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976737792. 
Ctx: { TraceId: 01jzmyk3kp6wxsnqa31ajrwzt5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGNiMzA2Y2UtOTY0NWY2ZjUtZWU3M2JkYzAtNmMyY2Q5YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.160233Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976737788. Ctx: { TraceId: 01jzmyk3kp2jzcz7hrd2badjn3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2ZhZjMwM2EtNGY2NDgyMmQtNjFjOWE4YTAtZDNmZTk5MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.160295Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976737793. Ctx: { TraceId: 01jzmyk3kq989h5mycj33ybz99, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmE2ODM1NGMtY2VmMTlhZjUtZjY4ZDRjMS1mNGRiNTViOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.160348Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976737789. Ctx: { TraceId: 01jzmyk3kpacm9eef2dkna8w33, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzFiOGVmODgtMzdkZDAxNGItNTE5ZGQ4ZDUtYzIyODZmZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.160404Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976737790. Ctx: { TraceId: 01jzmyk3kp6v33gqbv2xyf7erz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjkzZDEyOTQtNWY0YTQ2MjAtY2YwZjgzODEtNzg4ZjIxM2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.160461Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976737791. Ctx: { TraceId: 01jzmyk3kp4ka26fw4ed0fvhz2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGY2ZDU3ZmUtMTczZGI3YWUtNTg4N2Y5MGEtZTgxZGJjNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.167697Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976737798. Ctx: { TraceId: 01jzmyk3kyc6sn4sge27n2e4g2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzFiOGVmODgtMzdkZDAxNGItNTE5ZGQ4ZDUtYzIyODZmZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.167732Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976737796. Ctx: { TraceId: 01jzmyk3ky3sgc604x7dwtsyvp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGNiMzA2Y2UtOTY0NWY2ZjUtZWU3M2JkYzAtNmMyY2Q5YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.167808Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976737795. Ctx: { TraceId: 01jzmyk3ky78esazfzv7qspxvn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDNlNGZjZjEtZjBhMDkwNGEtYWFkMGU5MDAtNzRhZGQ4Nzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.167858Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976737797. Ctx: { TraceId: 01jzmyk3ky7jk5cxhtgfjet4bz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmE2ODM1NGMtY2VmMTlhZjUtZjY4ZDRjMS1mNGRiNTViOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.167910Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976737799. Ctx: { TraceId: 01jzmyk3ky8zxbqrr0epfx868k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2YxMmNmZTItOGIzMzQ4OGMtZWMwNzNjZTYtMmZkYzU1N2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T12:01:27.168152Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976737800. Ctx: { TraceId: 01jzmyk3ky7d6k8hfn28b3cm2x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2Q3YzIwODQtNjcxY2Q5NzMtYzIwNjQwYzQtMWE4OWMzNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.168242Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976737801. Ctx: { TraceId: 01jzmyk3ky9p27f8bbfrpphkcn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjkzZDEyOTQtNWY0YTQ2MjAtY2YwZjgzODEtNzg4ZjIxM2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.168549Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976737802. Ctx: { TraceId: 01jzmyk3kzewkrq23pn47x94jk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGY2ZDU3ZmUtMTczZGI3YWUtNTg4N2Y5MGEtZTgxZGJjNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.168755Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976737803. Ctx: { TraceId: 01jzmyk3kz5kxc4qasspqsyrx0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2ZhZjMwM2EtNGY2NDgyMmQtNjFjOWE4YTAtZDNmZTk5MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.171808Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976737805. Ctx: { TraceId: 01jzmyk3m330s5d0vnpcex306p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTM3Zjk4NDItMWE5YTY5OWItZDM0ZjJkNmYtZmJhM2JmZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.171901Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976737804. Ctx: { TraceId: 01jzmyk3m3es23q3zffadw3knb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzFiOGVmODgtMzdkZDAxNGItNTE5ZGQ4ZDUtYzIyODZmZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.171926Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976737806. Ctx: { TraceId: 01jzmyk3m368xstgck6eta1kc7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGNiMzA2Y2UtOTY0NWY2ZjUtZWU3M2JkYzAtNmMyY2Q5YTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.172227Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976737807. Ctx: { TraceId: 01jzmyk3m31rysk9yj20feqhfy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmE2ODM1NGMtY2VmMTlhZjUtZjY4ZDRjMS1mNGRiNTViOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.172321Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976737808. Ctx: { TraceId: 01jzmyk3m394avc8264hcc79q1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2Q3YzIwODQtNjcxY2Q5NzMtYzIwNjQwYzQtMWE4OWMzNzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root TClient::Ls request: /Root/Foo 2025-07-08T12:01:27.173590Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976737810. Ctx: { TraceId: 01jzmyk3m504ydp50p5hk0gnrq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGY2ZDU3ZmUtMTczZGI3YWUtNTg4N2Y5MGEtZTgxZGJjNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.173681Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976737809. 
Ctx: { TraceId: 01jzmyk3m5c6vs611xg3fkqr3j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDNlNGZjZjEtZjBhMDkwNGEtYWFkMGU5MDAtNzRhZGQ4Nzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1751976067149 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-07-08T12:01:27.174424Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976737811. Ctx: { TraceId: 01jzmyk3m5b2nf96hpj7gxjsnm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjkzZDEyOTQtNWY0YTQ2MjAtY2YwZjgzODEtNzg4ZjIxM2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.174451Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976737812. Ctx: { TraceId: 01jzmyk3m54p2fn03ff22544ak, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2YxMmNmZTItOGIzMzQ4OGMtZWMwNzNjZTYtMmZkYzU1N2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.175006Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976737813. Ctx: { TraceId: 01jzmyk3m5agtpj7g1wwq9304e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2ZhZjMwM2EtNGY2NDgyMmQtNjFjOWE4YTAtZDNmZTk5MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1751976067149 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... 
(TRUNCATED) 2025-07-08T12:01:27.630921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 10816 rowCount 121 cpuUsage 0 2025-07-08T12:01:27.633438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 29504 rowCount 333 cpuUsage 0 2025-07-08T12:01:27.731581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2025-07-08T12:01:27.731651Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037889 followerId=0, pathId 2: RowCount 121, DataSize 10816 2025-07-08T12:01:27.731704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 2: RowCount 333, DataSize 29504 2025-07-08T12:01:27.732143Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 Table has 2 shards >> KqpImmediateEffects::InsertDuplicates+UseSink [GOOD] >> KqpImmediateEffects::InsertConflictTxAborted >> KqpEffects::AlterAfterUpsertBeforeUpsertTransaction+UseSink [GOOD] >> KqpEffects::AlterAfterUpsertBeforeUpsertSelectTransaction+UseSink |69.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleTable [GOOD] |69.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast+UseSink [GOOD] >> KqpInplaceUpdate::Negative_SingleRowListFromRange-UseSink >> KqpImmediateEffects::InteractiveTxWithWriteAtTheEnd [GOOD] >> KqpImmediateEffects::ManyFlushes |69.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |69.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots >> KqpWrite::InsertRevert [GOOD] >> KqpWrite::ProjectReplace+UseSink >> KqpInplaceUpdate::SingleRowIf-UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithUpdates [GOOD] Test command err: 2025-07-08T12:01:06.991278Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679909014224987:2232];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:06.991346Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/00103e/r3tmp/tmpBGlsiu/pdisk_1.dat 2025-07-08T12:01:07.144997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:07.145023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:07.146385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7257, node 1 2025-07-08T12:01:07.179671Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2025-07-08T12:01:07.179681Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:07.179683Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:07.179729Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:01:07.179928Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:10865 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:07.201376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:10865 2025-07-08T12:01:07.488139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T12:01:07.571290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1751976067590 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... 
(TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1751976067590 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-07-08T12:01:07.636788Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jzmyjggkdtbrbdpg2sca2c5h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjZkZjkyMDQtNTQ5ODU0NWEtZmYwZWFiZTYtMzA4MTQ2NzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.637367Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jzmyjgh2d8gdvq1z3khm2k11, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWExYjk2MjItMTJkNjcxYzQtNGQzNGNmNTAtNjg2YzZiYTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.638121Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jzmyjggj3fj93qbk8b8jymxp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTNmNWRmODgtNDMwZjcxZDctNGVkYzNmYzctNzE4Y2RiODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.638215Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jzmyjggkcvfzhdvzm1d3xcfz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE1MTRlNi1jYmFjNTkzYS1hMTk2Y2IwYi1iMmQ1MWNmNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.638287Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jzmyjgh315v6w2n08jtr4fnk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWMyMTdmYzAtYmRlMTJhOGEtNTViNDQyMzMtMzIyZmRhMzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.638348Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jzmyjgh00d2c18m31mmwc27y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzFlZDY0NzktM2ZkNzE5MWItZDU2Y2QyZDctMjIzYzk5MTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.638412Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jzmyjggj6y808ebmdwvyd1sm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFmNTk1NzgtNTQ2MDNiM2QtNTczZjNlNy0xYzEyNzJlZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.638470Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jzmyjggj1apnd6depw6nneqe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGI1MzRjMzktNGRkNTJlYzEtNWRjNWZiMzAtYzY2OWYyN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T12:01:07.638537Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jzmyjgh37e7v2rxqt3smvnsv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTg3N2JmYjUtYWY0YTIyM2YtNDYxZWY0ZWMtYTkxNzUxMWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.646965Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jzmyjgh1c77pz2vwzefk96pn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjJhYjI2YzEtZmRmZmM2ZjQtZmMyZGE5YWUtMmNhMTJiNDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.660051Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jzmyjgj949shpz545hv1yqxh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWExYjk2MjItMTJkNjcxYzQtNGQzNGNmNTAtNjg2YzZiYTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.660163Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jzmyjgj9dt5zyq87wjdv574p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWMyMTdmYzAtYmRlMTJhOGEtNTViNDQyMzMtMzIyZmRhMzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.660223Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jzmyjgj96eb3x8fz12ba02xv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzFlZDY0NzktM2ZkNzE5MWItZDU2Y2QyZDctMjIzYzk5MTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.660267Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jzmyjgj9ckse74p8xx14ac6g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFmNTk1NzgtNTQ2MDNiM2QtNTczZjNlNy0xYzEyNzJlZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.660310Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jzmyjgj91wk817csr4pxra1s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGI1MzRjMzktNGRkNTJlYzEtNWRjNWZiMzAtYzY2OWYyN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.660355Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jzmyjgj9cvd1gm92svqwaekb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjZkZjkyMDQtNTQ5ODU0NWEtZmYwZWFiZTYtMzA4MTQ2NzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.660394Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jzmyjgj92hfgpk7z6zkwznj9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE1MTRlNi1jYmFjNTkzYS1hMTk2Y2IwYi1iMmQ1MWNmNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.660454Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jzmyjgj9a0vp2mfachqsx03d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTNmNWRmODgtNDMwZjcxZDctNGVkYzNmYzctNzE4Y2RiODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.660782Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. 
Ctx: { TraceId: 01jzmyjgjacx51zxzv09c2m1et, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTg3N2JmYjUtYWY0YTIyM2YtNDYxZWY0ZWMtYTkxNzUxMWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:07.662644Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jzmyjgjbdzaq1tt2dwzjm4bk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjJhYjI2YzEtZmRmZmM2ZjQtZmMyZGE5YWUtMmNhMTJiNDY=, Curren ... ionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.605335Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976738714. Ctx: { TraceId: 01jzmyk41m4avzd4ejshna1agt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWMyMTdmYzAtYmRlMTJhOGEtNTViNDQyMzMtMzIyZmRhMzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.605367Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976738715. Ctx: { TraceId: 01jzmyk41m3xp75ntere1m0dax, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzFlZDY0NzktM2ZkNzE5MWItZDU2Y2QyZDctMjIzYzk5MTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.605391Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976738717. Ctx: { TraceId: 01jzmyk41m9fhbkqfsqf22v7qz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWExYjk2MjItMTJkNjcxYzQtNGQzNGNmNTAtNjg2YzZiYTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.605397Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976738716. Ctx: { TraceId: 01jzmyk41m69heaakg0b6ybkpt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFmNTk1NzgtNTQ2MDNiM2QtNTczZjNlNy0xYzEyNzJlZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.608437Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976738719. Ctx: { TraceId: 01jzmyk41r2m1x3vmsd2s0vhjq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWExYjk2MjItMTJkNjcxYzQtNGQzNGNmNTAtNjg2YzZiYTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.608505Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976738720. Ctx: { TraceId: 01jzmyk41r6bygzkkpy90ph9sm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFmNTk1NzgtNTQ2MDNiM2QtNTczZjNlNy0xYzEyNzJlZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.609285Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976738721. Ctx: { TraceId: 01jzmyk41rb8e9z9s4jz6wxp9r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGI1MzRjMzktNGRkNTJlYzEtNWRjNWZiMzAtYzY2OWYyN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.609337Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976738722. Ctx: { TraceId: 01jzmyk41r63s5mt4f1maetfd3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzFlZDY0NzktM2ZkNzE5MWItZDU2Y2QyZDctMjIzYzk5MTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.609806Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976738723. Ctx: { TraceId: 01jzmyk41rbshprj5847ee3h91, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjZkZjkyMDQtNTQ5ODU0NWEtZmYwZWFiZTYtMzA4MTQ2NzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T12:01:27.610278Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976738724. Ctx: { TraceId: 01jzmyk41r4rjv19ctvmv3tqcp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE1MTRlNi1jYmFjNTkzYS1hMTk2Y2IwYi1iMmQ1MWNmNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.610411Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976738725. Ctx: { TraceId: 01jzmyk41s3aext1gsx1rh9h48, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWMyMTdmYzAtYmRlMTJhOGEtNTViNDQyMzMtMzIyZmRhMzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.610587Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976738727. Ctx: { TraceId: 01jzmyk41sc1gcdk5afdfsw9zv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTNmNWRmODgtNDMwZjcxZDctNGVkYzNmYzctNzE4Y2RiODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.610779Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976738726. Ctx: { TraceId: 01jzmyk41sds8m8syk7n5ysd42, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjJhYjI2YzEtZmRmZmM2ZjQtZmMyZGE5YWUtMmNhMTJiNDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.610864Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976738728. Ctx: { TraceId: 01jzmyk41s13xh4v41ja4y3f6e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTg3N2JmYjUtYWY0YTIyM2YtNDYxZWY0ZWMtYTkxNzUxMWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.611939Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976738729. Ctx: { TraceId: 01jzmyk41v3d5jb11eq3rw6hsr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWExYjk2MjItMTJkNjcxYzQtNGQzNGNmNTAtNjg2YzZiYTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.615709Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976738730. Ctx: { TraceId: 01jzmyk41z8hep4x08hr93r6z0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGFmNTk1NzgtNTQ2MDNiM2QtNTczZjNlNy0xYzEyNzJlZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.615753Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976738731. Ctx: { TraceId: 01jzmyk41z8qc2cgtcaczdzaaf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWMyMTdmYzAtYmRlMTJhOGEtNTViNDQyMzMtMzIyZmRhMzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.615797Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976738733. Ctx: { TraceId: 01jzmyk41z5fq13dk7c0f9q0tj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzFlZDY0NzktM2ZkNzE5MWItZDU2Y2QyZDctMjIzYzk5MTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.615862Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976738732. Ctx: { TraceId: 01jzmyk41z5w4p2sc0fbrn4pv7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjZkZjkyMDQtNTQ5ODU0NWEtZmYwZWFiZTYtMzA4MTQ2NzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.616426Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976738737. 
Ctx: { TraceId: 01jzmyk41z1rjedk38kvx66dxe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWExYjk2MjItMTJkNjcxYzQtNGQzNGNmNTAtNjg2YzZiYTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.616428Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976738736. Ctx: { TraceId: 01jzmyk41zasdwjy9r81b59kyh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjJhYjI2YzEtZmRmZmM2ZjQtZmMyZGE5YWUtMmNhMTJiNDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.616614Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976738734. Ctx: { TraceId: 01jzmyk41z1mabyxpscrxd6b00, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGI1MzRjMzktNGRkNTJlYzEtNWRjNWZiMzAtYzY2OWYyN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.616687Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976738735. Ctx: { TraceId: 01jzmyk41z41t51hptcn637cqp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTg3N2JmYjUtYWY0YTIyM2YtNDYxZWY0ZWMtYTkxNzUxMWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.616753Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976738739. Ctx: { TraceId: 01jzmyk41z8b43e7hxrrm9qpzp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTE1MTRlNi1jYmFjNTkzYS1hMTk2Y2IwYi1iMmQ1MWNmNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-07-08T12:01:27.616810Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976738738. Ctx: { TraceId: 01jzmyk41z5wswxtzqpxwcxksg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTNmNWRmODgtNDMwZjcxZDctNGVkYzNmYzctNzE4Y2RiODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1751976067590 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... 
(TRUNCATED) TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1751976067590 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-07-08T12:01:27.647199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-07-08T12:01:27.647263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 2: RowCount 11073, DataSize 676068 2025-07-08T12:01:27.647710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-07-08T12:01:28.055915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 684772 rowCount 11137 cpuUsage 0 2025-07-08T12:01:28.055940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 749692 rowCount 11901 cpuUsage 0 2025-07-08T12:01:28.158852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2025-07-08T12:01:28.158929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 2: RowCount 11137, DataSize 684772 2025-07-08T12:01:28.158987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037889 followerId=0, pathId 2: RowCount 11901, DataSize 749692 2025-07-08T12:01:28.161064Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 Table has 2 shards >> BsControllerTest::TestLocalBrokenRelocation >> KqpImmediateEffects::DeleteAfterUpsert [GOOD] >> KqpImmediateEffects::DeleteAfterInsert >> KqpImmediateEffects::TxWithWriteAtTheEnd-UseSink [GOOD] >> KqpImmediateEffects::UnobservedUncommittedChangeConflict >> KqpEffects::InsertRevert_Literal_Success [GOOD] >> KqpEffects::InsertRevert_Literal_Duplicates >> KqpInplaceUpdate::SingleRowIf+UseSink [GOOD] >> KqpEffects::AlterAfterUpsertBeforeUpsertSelectTransaction+UseSink [GOOD] >> KqpEffects::AlterAfterUpsertBeforeUpsertSelectTransaction-UseSink >> KqpInplaceUpdate::Negative_SingleRowListFromRange-UseSink [GOOD] >> SelfHealActorTest::NoMoreThanOneReplicating [GOOD] >> KqpImmediateEffects::InsertConflictTxAborted [GOOD] >> KqpWrite::ProjectReplace+UseSink [GOOD] >> KqpImmediateEffects::ManyFlushes [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowIf+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 20770, MsgBus: 4887 2025-07-08T12:01:28.545054Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524680003850075386:2067];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:28.545088Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b69/r3tmp/tmpFmgESf/pdisk_1.dat 2025-07-08T12:01:28.605520Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20770, node 1 2025-07-08T12:01:28.617834Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:28.617846Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:28.617847Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:28.617879Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4887 2025-07-08T12:01:28.646505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:28.646527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:28.647626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4887 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:28.686773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:28.702383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:28.773762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:28.806119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:28.842324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:28.954842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:28.968807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:28.977876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:28.991133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:28.997944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.015270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.025809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.170779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15224, MsgBus: 61263 2025-07-08T12:01:29.536734Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524680008682535815:2073];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:29.536747Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b69/r3tmp/tmpus0DsG/pdisk_1.dat 2025-07-08T12:01:29.553890Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15224, node 2 2025-07-08T12:01:29.564799Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:29.564811Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:29.564813Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:29.564854Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61263 TClient is connected to server localhost:61263 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:29.637217Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:29.637243Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:29.638302Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:29.641574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:29.642883Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:29.657625Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.716203Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:29.784142Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:29.797114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:29.896055Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.910769Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.921667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.936325Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.949717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.959760Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.974519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.117184Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 >> BsControllerTest::SelfHealBlock4Plus2 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowListFromRange-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 18470, MsgBus: 10394 2025-07-08T12:01:28.897280Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524680002534052503:2205];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:28.897382Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b66/r3tmp/tmpPUXXTy/pdisk_1.dat 2025-07-08T12:01:28.954094Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18470, node 1 2025-07-08T12:01:28.972959Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:28.972972Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:28.972974Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:28.973015Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10394 2025-07-08T12:01:28.996345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:28.996370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:28.998433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected TClient is connected to server localhost:10394 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:29.036380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:29.062921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:29.081623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.102794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:29.112457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:29.257424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.264252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.321076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.333962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.349354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.361703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.378147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.525355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 64383, MsgBus: 64403 2025-07-08T12:01:29.772673Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524680006712238497:2074];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b66/r3tmp/tmpRpuU7P/pdisk_1.dat 2025-07-08T12:01:29.783167Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TServer::EnableGrpc on GrpcPort 64383, node 2 2025-07-08T12:01:29.795308Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:29.797055Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:29.797065Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:29.797068Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:29.797111Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64403 TClient is connected to server localhost:64403 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:29.872001Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:29.872029Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:29.873366Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:29.874882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:29.876113Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:29.902596Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:29.921708Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:29.942516Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-07-08T12:01:29.963419Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.097129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.107981Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.117494Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.132217Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.147603Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.159689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.173208Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.314088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 |69.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> SelfHealActorTest::NoMoreThanOneReplicating [GOOD] >> DataShardSnapshots::UncommittedWriteRestartDuringCommitThenBulkErase [GOOD] >> KqpInplaceUpdate::SingleRowIf-UseSink [GOOD] >> KqpInplaceUpdate::SingleRowPgNotNull+UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InsertConflictTxAborted [GOOD] Test command err: Trying to start YDB, gRPC: 15437, MsgBus: 1945 2025-07-08T12:01:28.600644Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524680005060148964:2072];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:28.600672Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b6b/r3tmp/tmpYYIutA/pdisk_1.dat 2025-07-08T12:01:28.661890Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15437, node 1 2025-07-08T12:01:28.681251Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:28.681267Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:28.681269Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty 
maybe) 2025-07-08T12:01:28.681317Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:01:28.701192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:28.701223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:1945 2025-07-08T12:01:28.705377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1945 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:28.753431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:28.755137Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:28.760272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:28.793880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:28.813823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:28.836524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:28.981831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:28.996210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.004452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.018838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.035364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.047600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.060403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.194437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.252704Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=4; 2025-07-08T12:01:29.254747Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 4 at tablet 72075186224037922 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-07-08T12:01:29.254794Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 4 at tablet 72075186224037922 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-07-08T12:01:29.254854Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7524680009355118856:2455], Table: `/Root/TestImmediateEffects` ([72057594046644480:13:1]), SessionActorId: [1:7524680009355118711:2455]Got CONSTRAINT VIOLATION for table `/Root/TestImmediateEffects`. ShardID=72075186224037922, Sink=[1:7524680009355118856:2455].{
: Error: Conflict with existing key., code: 2012 } 2025-07-08T12:01:29.254942Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7524680009355118840:2455], SessionActorId: [1:7524680009355118711:2455], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:7524680009355118711:2455]. isRollback=0 2025-07-08T12:01:29.255012Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWE2NWI1ZDMtZjA4MThiN2UtN2JmZjAwOGItZjc0OTI5Mjg=, ActorId: [1:7524680009355118711:2455], ActorState: ExecuteState, TraceId: 01jzmyk5m31tz51s84qt7wqw43, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7524680009355118850:2455] from: [1:7524680009355118840:2455] 2025-07-08T12:01:29.255114Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7524680009355118850:2455] TxId: 281474976715673. Ctx: { TraceId: 01jzmyk5m31tz51s84qt7wqw43, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWE2NWI1ZDMtZjA4MThiN2UtN2JmZjAwOGItZjc0OTI5Mjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } 2025-07-08T12:01:29.255171Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWE2NWI1ZDMtZjA4MThiN2UtN2JmZjAwOGItZjc0OTI5Mjg=, ActorId: [1:7524680009355118711:2455], ActorState: ExecuteState, TraceId: 01jzmyk5m31tz51s84qt7wqw43, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 3578, MsgBus: 29442 2025-07-08T12:01:29.610854Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524680007916187849:2093];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:29.611994Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b6b/r3tmp/tmpXbWkLv/pdisk_1.dat 2025-07-08T12:01:29.624222Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3578, node 2 2025-07-08T12:01:29.637503Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:29.637516Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:29.637518Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:29.637568Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29442 TClient is connected to server localhost:29442 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:29.712907Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:29.712965Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:29.713296Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:29.713984Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:29.719952Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:29.734094Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:29.755837Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.766868Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:29.969761Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.981140Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.037180Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.092364Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.104662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.117327Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.134429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.289515Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.388581Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7524680012211157795:2482], TxId: 281474976715676, task: 1. Ctx: { TraceId : 01jzmyk6qd61bfszzhx0xyd0eb. SessionId : ydb://session/3?node_id=2&id=NmNjM2VjMjMtZmRmMGYxNjItZjJjMWJiZWYtMmRmOTRhZGI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : . }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-07-08T12:01:30.388656Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7524680012211157797:2483], TxId: 281474976715676, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=NmNjM2VjMjMtZmRmMGYxNjItZjJjMWJiZWYtMmRmOTRhZGI=. CustomerSuppliedId : . TraceId : 01jzmyk6qd61bfszzhx0xyd0eb. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : . }. Handle abort execution event from: [2:7524680012211157792:2446], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-07-08T12:01:30.388717Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NmNjM2VjMjMtZmRmMGYxNjItZjJjMWJiZWYtMmRmOTRhZGI=, ActorId: [2:7524680012211157541:2446], ActorState: ExecuteState, TraceId: 01jzmyk6qd61bfszzhx0xyd0eb, Create QueryResponse for error on request, msg: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpWrite::ProjectReplace+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 1077, MsgBus: 8078 2025-07-08T12:01:28.788862Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524680005536832680:2065];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:28.788886Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b67/r3tmp/tmpPsanm8/pdisk_1.dat 2025-07-08T12:01:28.859494Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1077, node 1 2025-07-08T12:01:28.881210Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:28.881219Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:28.881221Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:28.881258Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8078 TClient is connected to server localhost:8078 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-07-08T12:01:28.929280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:28.929315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:28.930466Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T12:01:28.942857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:28.956463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:28.984469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:29.004608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:29.017015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:29.164675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.173051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.187395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.201114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.214555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.229873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.242721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.400513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15170, MsgBus: 1717 2025-07-08T12:01:29.964644Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524680009565930821:2073];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:29.965568Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b67/r3tmp/tmpgmHlKi/pdisk_1.dat 2025-07-08T12:01:29.981946Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15170, node 2 2025-07-08T12:01:29.990359Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:29.990372Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:29.990374Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-07-08T12:01:29.990420Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1717 TClient is connected to server localhost:1717 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:30.064888Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:30.064921Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:30.065988Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:30.067252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:30.113337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.125023Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:30.147080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:30.163830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:30.325447Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.333648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.341021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.355877Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.369787Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.384584Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.398673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ManyFlushes [GOOD] Test command err: Trying to start YDB, gRPC: 28351, MsgBus: 3517 2025-07-08T12:01:28.687122Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524680004764956533:2072];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:28.687331Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b68/r3tmp/tmp4zFhP1/pdisk_1.dat TServer::EnableGrpc on GrpcPort 28351, node 1 2025-07-08T12:01:28.759533Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-07-08T12:01:28.759549Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-07-08T12:01:28.759599Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:28.777145Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:28.777162Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:28.777164Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:28.777219Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:01:28.787731Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:28.787764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:28.793318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3517 TClient is connected to server localhost:3517 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:28.840373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:28.849404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T12:01:28.868418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:28.896619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T12:01:28.964888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:29.083883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.145638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.156151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.211967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.223039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.235803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.252409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.400641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 4562, MsgBus: 28573 2025-07-08T12:01:29.865173Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524680005850213876:2118];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:29.866406Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b68/r3tmp/tmpl0V5Z7/pdisk_1.dat 2025-07-08T12:01:29.878760Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4562, node 2 2025-07-08T12:01:29.887247Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:29.887259Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:29.887262Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:29.887308Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28573 TClient is connected to server localhost:28573 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:29.965147Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:29.965177Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:29.966129Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:29.968805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:29.970635Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:29.981834Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:30.002341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:30.029271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:30.040442Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:30.210827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.220477Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.275597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.285518Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.299390Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.313694Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.327807Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.481643Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 >> KqpImmediateEffects::UnobservedUncommittedChangeConflict [GOOD] >> KqpImmediateEffects::DeleteAfterInsert [GOOD] >> KqpEffects::InsertRevert_Literal_Duplicates [GOOD] >> TExportToS3WithRebootsTests::ShouldSucceedOnViewsAndTablesPermissions [GOOD] >> KqpEffects::AlterAfterUpsertBeforeUpsertSelectTransaction-UseSink [GOOD] |69.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UnobservedUncommittedChangeConflict [GOOD] Test command err: Trying to start YDB, gRPC: 14299, MsgBus: 18845 2025-07-08T12:01:29.427741Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524680009789724154:2067];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:29.427759Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b65/r3tmp/tmpZxs4Nv/pdisk_1.dat 2025-07-08T12:01:29.490807Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14299, node 1 2025-07-08T12:01:29.518949Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:29.518960Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:29.518962Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:29.519004Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 
2025-07-08T12:01:29.529208Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:29.529247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:29.530313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18845 TClient is connected to server localhost:18845 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:29.568872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:29.573715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:29.594684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.660003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:29.677537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:29.830051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.836681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.844549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.858459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.922389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.933649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.943739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.092516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22620, MsgBus: 63567 2025-07-08T12:01:30.546793Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524680009947475969:2068];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:30.546832Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b65/r3tmp/tmpbs2pZV/pdisk_1.dat 2025-07-08T12:01:30.562185Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22620, node 2 2025-07-08T12:01:30.569222Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:30.569233Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:30.569235Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:30.569271Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63567 TClient is connected to server localhost:63567 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:30.650424Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:30.650466Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:30.650845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.651594Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:30.654304Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:30.677188Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:30.693546Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:30.714127Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:30.726670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:30.872899Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.880001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.898205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.908511Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.923074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.936885Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.950739Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:31.090781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:31.206370Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWRhMjZlYTktZWYxZTMxYzgtODkzYTM4MTYtMmU1ZmQ0M2Y=, ActorId: [2:7524680014242445919:2481], ActorState: ExecuteState, TraceId: 01jzmyk7j3begxez8gkkvhrgqv, Create QueryResponse for error on request, msg: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::DeleteAfterInsert [GOOD] Test command err: Trying to start YDB, gRPC: 27114, MsgBus: 11168 2025-07-08T12:01:29.371338Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524680006715154896:2067];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:29.371501Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b64/r3tmp/tmp2FOSQS/pdisk_1.dat 2025-07-08T12:01:29.440466Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27114, node 1 2025-07-08T12:01:29.462482Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:29.462493Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:29.462494Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:29.462524Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:01:29.472766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:29.472791Z node 1 
:HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:29.473964Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11168 TClient is connected to server localhost:11168 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:29.530596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:29.534929Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:29.552187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:29.574660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:29.596542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-07-08T12:01:29.607409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.726528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.735698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.747665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.761290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.774780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.831992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.845040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.002256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14100, MsgBus: 11287 2025-07-08T12:01:30.464330Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524680011284736586:2069];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:30.464638Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b64/r3tmp/tmpjGqKRD/pdisk_1.dat 2025-07-08T12:01:30.485484Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14100, node 2 2025-07-08T12:01:30.495515Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:30.495527Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:30.495530Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:30.495579Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11287 TClient is connected to server localhost:11287 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:30.568907Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:30.568933Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:30.569348Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.569836Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:30.573479Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:30.579997Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:30.594180Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:30.616236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-07-08T12:01:30.632488Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.793481Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.801082Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.810070Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.817098Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.824074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.831044Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.849922Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.987263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpEffects::AlterAfterUpsertBeforeUpsertSelectTransaction-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 12640, MsgBus: 11409 2025-07-08T12:01:28.698809Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524680005165390972:2069];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:28.698832Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b6a/r3tmp/tmpcrWUtz/pdisk_1.dat 2025-07-08T12:01:28.762439Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12640, node 1 2025-07-08T12:01:28.785162Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:28.785176Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:28.785178Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:28.785219Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11409 TClient is connected to server localhost:11409 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-07-08T12:01:28.838644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:28.838671Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:28.839621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:28.851624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:28.854617Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:28.861746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:28.927941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:28.956680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:28.974503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:29.087094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.097116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.104357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.116163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.125002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.138212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.153137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.276383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.312465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.332861Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7524680009460360816:2455], SessionActorId: [1:7524680009460360717:2455], Scheme changed for table `/Root/TestTable`. 2025-07-08T12:01:29.334073Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7524680009460360816:2455], SessionActorId: [1:7524680009460360717:2455], statusCode=SCHEME_ERROR. Issue=
: Error: Scheme changed. Table: `/Root/TestTable`., code: 2028 . sessionActorId=[1:7524680009460360717:2455]. isRollback=0 2025-07-08T12:01:29.334143Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmE1YTdlNDctM2Y4NGQ4ZmEtZDVjNzdmZjYtNDkwODMxYjA=, ActorId: [1:7524680009460360717:2455], ActorState: ExecuteState, TraceId: 01jzmyk5qmdjexjr1j8exs6amj, got TEvKqpBuffer::TEvError in ExecuteState, status: SCHEME_ERROR send to: [1:7524680009460360900:2455] from: [1:7524680009460360816:2455] 2025-07-08T12:01:29.334230Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7524680009460360900:2455] TxId: 281474976715674. Ctx: { TraceId: 01jzmyk5qmdjexjr1j8exs6amj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmE1YTdlNDctM2Y4NGQ4ZmEtZDVjNzdmZjYtNDkwODMxYjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. SCHEME_ERROR: {
: Error: Scheme changed. Table: `/Root/TestTable`., code: 2028 } 2025-07-08T12:01:29.334256Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7524680009460360903:2455], TxId: 281474976715674, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=MmE1YTdlNDctM2Y4NGQ4ZmEtZDVjNzdmZjYtNDkwODMxYjA=. CustomerSuppliedId : . TraceId : 01jzmyk5qmdjexjr1j8exs6amj. CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. Handle abort execution event from: [1:7524680009460360900:2455], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2025-07-08T12:01:29.334388Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmE1YTdlNDctM2Y4NGQ4ZmEtZDVjNzdmZjYtNDkwODMxYjA=, ActorId: [1:7524680009460360717:2455], ActorState: ExecuteState, TraceId: 01jzmyk5qmdjexjr1j8exs6amj, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 23732, MsgBus: 19950 2025-07-08T12:01:29.607362Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524680006520330175:2070];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:29.608145Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b6a/r3tmp/tmpyxmGgT/pdisk_1.dat 2025-07-08T12:01:29.622648Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23732, node 2 2025-07-08T12:01:29.629373Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:29.629384Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:29.629385Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:29.629422Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19950 TClient is connected to server localhost:19950 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:29.682971Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:29.684201Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:29.693606Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:29.712984Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:29.713013Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:29.714153Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:29.752003Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:29.766908Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:29.782826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:29.943476Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.951931Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.959336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.971916Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.987624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:29.999923Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.016283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.162526Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.195882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.214468Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7524680010815300029:2455], SessionActorId: [2:7524680010815299936:2455], Scheme changed for table `/Root/TestTable`. 2025-07-08T12:01:30.214497Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7524680010815300029:2455], SessionActorId: [2:7524680010815299936:2455], statusCode=SCHEME_ERROR. Issue=
: Error: Scheme changed. Table: `/Root/TestTable`., code: 2028 . sessionActorId=[2:7524680010815299936:2455]. isRollback=0 2025-07-08T12:01:30.214541Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2E0Yzk4ZDUtYWZlNTJlMjYtNGE2MzMzN2EtZDYzNjJhYTQ=, ActorId: [2:7524680010815299936:2455], ActorState: ExecuteState, TraceId: 01jzmyk6jrabkqe9084a2jxw5e, got TEvKqpBuffer::TEvError in ExecuteState, status: SCHEME_ERROR send to: [2:7524680010815300097:2455] from: [2:7524680010815300029:2455] 2025-07-08T12:01:30.214552Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7524680010815300097:2455] TxId: 281474976715674. Ctx: { TraceId: 01jzmyk6jrabkqe9084a2jxw5e, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2E0Yzk4ZDUtYWZlNTJlMjYtNGE2MzMzN2EtZDYzNjJhYTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. SCHEME_ERROR: {
: Error: Scheme changed. Table: `/Root/TestTable`., code: 2028 } 2025-07-08T12:01:30.214568Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7524680010815300100:2455], TxId: 281474976715674, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=Y2E0Yzk4ZDUtYWZlNTJlMjYtNGE2MzMzN2EtZDYzNjJhYTQ=. TraceId : 01jzmyk6jrabkqe9084a2jxw5e. CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. Handle abort execution event from: [2:7524680010815300097:2455], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2025-07-08T12:01:30.214659Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2E0Yzk4ZDUtYWZlNTJlMjYtNGE2MzMzN2EtZDYzNjJhYTQ=, ActorId: [2:7524680010815299936:2455], ActorState: ExecuteState, TraceId: 01jzmyk6jrabkqe9084a2jxw5e, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 15440, MsgBus: 14711 2025-07-08T12:01:30.613808Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7524680013624314549:2181];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:30.614526Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b6a/r3tmp/tmpedimW5/pdisk_1.dat 2025-07-08T12:01:30.631159Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15440, node 3 2025-07-08T12:01:30.642804Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:30.642819Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:30.642821Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:30.642874Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14711 TClient is connected to server localhost:14711 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:30.713977Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:30.714009Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:30.715174Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:30.719933Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:30.721342Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:30.733748Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:30.748743Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:30.773025Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:30.783345Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:30.973204Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.984660Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.992503Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:31.006521Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:31.020464Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:31.035012Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:31.048340Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:31.252893Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:31.300932Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-07-08T12:01:31.326473Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NmE1OGYzZjctNWVmNjc5NmEtYjE3NjExYWYtMTFjNWM4YzY=, ActorId: [3:7524680017919284146:2446], ActorState: ExecuteState, TraceId: 01jzmyk7na1p60wk0sxs0wzt10, Create QueryResponse for error on request, msg: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertRevert_Literal_Duplicates [GOOD] Test command err: Trying to start YDB, gRPC: 9440, MsgBus: 29236 2025-07-08T12:01:29.642888Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524680007417915832:2081];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:29.643011Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect 
path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b63/r3tmp/tmpAab5zF/pdisk_1.dat 2025-07-08T12:01:29.710322Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9440, node 1 2025-07-08T12:01:29.731219Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:29.731231Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:29.731233Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:29.731276Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:01:29.743120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:29.743162Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:29.744217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29236 TClient is connected to server localhost:29236 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:29.784608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:29.818001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:29.838389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:29.864422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:29.882892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:30.011907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.067999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.076344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.089639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.148649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.162521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.174093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 26392, MsgBus: 31645 2025-07-08T12:01:30.611845Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524680010312959436:2093];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:30.613586Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b63/r3tmp/tmpBWRAYk/pdisk_1.dat TServer::EnableGrpc on GrpcPort 26392, node 2 2025-07-08T12:01:30.632623Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:30.633114Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:30.633124Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:30.633126Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:30.633161Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31645 TClient is connected to server localhost:31645 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:30.715710Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:30.715739Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:30.716081Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.716517Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:30.717319Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:30.737875Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:30.793938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:30.813784Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:30.825406Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:30.942857Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.950491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.957396Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.971200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.985728Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:31.000326Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:31.013275Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 >> KqpInplaceUpdate::SingleRowPgNotNull+UseSink [GOOD] >> SelfHealActorTest::SingleErrorDisk [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::UncommittedWriteRestartDuringCommitThenBulkErase [GOOD] Test command err: 2025-07-08T12:00:38.813462Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001a1c/r3tmp/tmpYuwL4E/pdisk_1.dat 2025-07-08T12:00:38.945601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-07-08T12:00:38.962847Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:00:38.994933Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-07-08T12:00:38.995216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:38.995239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:38.995271Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-07-08T12:00:39.005778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:39.082821Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Handle TEvProposeTransaction 2025-07-08T12:00:39.082847Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] TxId# 281474976715657 ProcessProposeTransaction 2025-07-08T12:00:39.082876Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:578:2498] 
2025-07-08T12:00:39.100016Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-07-08T12:00:39.100057Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-07-08T12:00:39.100297Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-07-08T12:00:39.100315Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-07-08T12:00:39.100375Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-07-08T12:00:39.100418Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-07-08T12:00:39.100432Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-07-08T12:00:39.100817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:39.100981Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 HANDLE EvClientConnected 2025-07-08T12:00:39.101104Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-07-08T12:00:39.101114Z node 1 :TX_PROXY DEBUG: Actor# [1:578:2498] txid# 281474976715657 SEND to# [1:545:2470] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-07-08T12:00:39.114847Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:594:2513], Recipient [1:603:2519]: NKikimr::TEvTablet::TEvBoot 2025-07-08T12:00:39.115081Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:594:2513], Recipient [1:603:2519]: NKikimr::TEvTablet::TEvRestored 2025-07-08T12:00:39.115156Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:603:2519] 2025-07-08T12:00:39.115215Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:00:39.123385Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:594:2513], Recipient [1:603:2519]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-07-08T12:00:39.123540Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:00:39.123563Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-07-08T12:00:39.123707Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-07-08T12:00:39.123716Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 
2025-07-08T12:00:39.123722Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-07-08T12:00:39.123765Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-07-08T12:00:39.123783Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-07-08T12:00:39.123796Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:619:2519] in generation 1 2025-07-08T12:00:39.134055Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-07-08T12:00:39.138311Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-07-08T12:00:39.138379Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-07-08T12:00:39.138401Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:621:2529] 2025-07-08T12:00:39.138407Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-07-08T12:00:39.138411Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-07-08T12:00:39.138416Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-07-08T12:00:39.138469Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:603:2519], Recipient [1:603:2519]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-07-08T12:00:39.138476Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-07-08T12:00:39.138583Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-07-08T12:00:39.138609Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-07-08T12:00:39.138626Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-07-08T12:00:39.138633Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-07-08T12:00:39.138641Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-07-08T12:00:39.138646Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-07-08T12:00:39.138650Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-07-08T12:00:39.138654Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-07-08T12:00:39.138660Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-07-08T12:00:39.138765Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:610:2523], Recipient [1:603:2519]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:39.138772Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-07-08T12:00:39.138779Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:599:2516], serverId# [1:610:2523], sessionId# [0:0:0] 2025-07-08T12:00:39.138799Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:361:2356], Recipient [1:610:2523] 2025-07-08T12:00:39.138803Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-07-08T12:00:39.138825Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-07-08T12:00:39.138883Z 
node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-07-08T12:00:39.138895Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-07-08T12:00:39.138910Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-07-08T12:00:39.138918Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-07-08T12:00:39.138922Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-07-08T12:00:39.138929Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-07-08T12:00:39.138933Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-07-08T12:00:39.138996Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-07-08T12:00:39.139000Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-07-08T12:00:39.139003Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-07-08T12:00:39.139006Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-07-08T12:00:39.139018Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-07-08T12:00:39.139021Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-07-08T12:00:39.139024Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-07-08T12:00:39.139027Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-07-08T12:00:39.139032Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-07-08T12:00:39.139251Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:622:2530], Recipient [1:603:2519]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-07-08T12:00:39.139260Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-07-08T12:00:39.149567Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-07-08T12:00:39.149598Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-07-08T12:00:39.149607Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-07-08T12:00:39.149620Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ... /3?node_id=12&id=MWZhNjA5NS0yYzE4MGEwNy0xNjcyYWZjNy01MzMwYzMxYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Got result, channelId: 2, shardId: 0, inputIndex: 0, from: [12:989:2798], finished: 0 2025-07-08T12:01:31.084537Z node 12 :KQP_EXECUTER TRACE: ActorId: [12:975:2780] TxId: 281474976715667. 
Ctx: { TraceId: 01jzmyk7d50wrn1cwf3042989s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=12&id=MWZhNjA5NS0yYzE4MGEwNy0xNjcyYWZjNy01MzMwYzMxYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Send ack to channelId: 2, seqNo: 1, to: [12:989:2798] 2025-07-08T12:01:31.084570Z node 12 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [12:988:2797], Recipient [12:825:2678]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 RangesSize: 1 2025-07-08T12:01:31.084581Z node 12 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-07-08T12:01:31.084589Z node 12 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit CheckRead 2025-07-08T12:01:31.084598Z node 12 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-07-08T12:01:31.084601Z node 12 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit CheckRead 2025-07-08T12:01:31.084604Z node 12 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-07-08T12:01:31.084607Z node 12 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit BuildAndWaitDependencies 2025-07-08T12:01:31.084615Z node 12 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037889 2025-07-08T12:01:31.084620Z node 12 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-07-08T12:01:31.084622Z node 12 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-07-08T12:01:31.084626Z node 12 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit ExecuteRead 2025-07-08T12:01:31.084629Z node 12 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit ExecuteRead 2025-07-08T12:01:31.084640Z node 12 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 } 2025-07-08T12:01:31.084661Z node 12 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v4000/18446744073709551615 2025-07-08T12:01:31.084665Z node 12 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[12:988:2797], 1} after executionsCount# 1 2025-07-08T12:01:31.084670Z node 12 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[12:988:2797], 1} sends rowCount# 2, bytes# 64, quota rows left# 998, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-07-08T12:01:31.084679Z node 12 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[12:988:2797], 1} finished in read 2025-07-08T12:01:31.084685Z node 12 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-07-08T12:01:31.084688Z node 12 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit ExecuteRead 2025-07-08T12:01:31.084691Z node 12 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit CompletedOperations 2025-07-08T12:01:31.084695Z node 12 :TX_DATASHARD 
TRACE: Trying to execute [0:3] at 72075186224037889 on unit CompletedOperations 2025-07-08T12:01:31.084701Z node 12 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-07-08T12:01:31.084704Z node 12 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit CompletedOperations 2025-07-08T12:01:31.084707Z node 12 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037889 has finished 2025-07-08T12:01:31.084711Z node 12 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-07-08T12:01:31.084721Z node 12 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-07-08T12:01:31.084805Z node 12 :KQP_EXECUTER TRACE: ActorId: [12:975:2780] TxId: 281474976715667. Ctx: { TraceId: 01jzmyk7d50wrn1cwf3042989s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=12&id=MWZhNjA5NS0yYzE4MGEwNy0xNjcyYWZjNy01MzMwYzMxYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Got result, channelId: 2, shardId: 0, inputIndex: 0, from: [12:989:2798], finished: 1 2025-07-08T12:01:31.084814Z node 12 :KQP_EXECUTER TRACE: ActorId: [12:975:2780] TxId: 281474976715667. Ctx: { TraceId: 01jzmyk7d50wrn1cwf3042989s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=12&id=MWZhNjA5NS0yYzE4MGEwNy0xNjcyYWZjNy01MzMwYzMxYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Send ack to channelId: 2, seqNo: 2, to: [12:989:2798] 2025-07-08T12:01:31.084829Z node 12 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [12:988:2797], Recipient [12:825:2678]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2025-07-08T12:01:31.084834Z node 12 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 1 } 2025-07-08T12:01:31.084961Z node 12 :KQP_EXECUTER DEBUG: ActorId: [12:975:2780] TxId: 281474976715667. Ctx: { TraceId: 01jzmyk7d50wrn1cwf3042989s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=12&id=MWZhNjA5NS0yYzE4MGEwNy0xNjcyYWZjNy01MzMwYzMxYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: ExecuteState, got execution state from compute actor: [12:986:2798], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 164 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 64 FinishTimeMs: 1751976091084 InputRows: 3 InputBytes: 12 OutputRows: 3 OutputBytes: 12 ResultRows: 3 ResultBytes: 12 ComputeCpuTimeUs: 45 BuildCpuTimeUs: 19 HostName: "ghrun-3z2hjo4icm" NodeId: 12 CreateTimeMs: 1751976091083 UpdateTimeMs: 1751976091084 } MaxMemoryUsage: 1048576 } 2025-07-08T12:01:31.084985Z node 12 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jzmyk7d50wrn1cwf3042989s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=12&id=MWZhNjA5NS0yYzE4MGEwNy0xNjcyYWZjNy01MzMwYzMxYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Compute actor has finished execution: [12:986:2798] 2025-07-08T12:01:31.084996Z node 12 :KQP_EXECUTER DEBUG: ActorId: [12:975:2780] TxId: 281474976715667. Ctx: { TraceId: 01jzmyk7d50wrn1cwf3042989s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=12&id=MWZhNjA5NS0yYzE4MGEwNy0xNjcyYWZjNy01MzMwYzMxYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Waiting for: CA [12:985:2797], 2025-07-08T12:01:31.085002Z node 12 :KQP_EXECUTER DEBUG: ActorId: [12:975:2780] TxId: 281474976715667. 
Ctx: { TraceId: 01jzmyk7d50wrn1cwf3042989s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=12&id=MWZhNjA5NS0yYzE4MGEwNy0xNjcyYWZjNy01MzMwYzMxYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [12:985:2797], 2025-07-08T12:01:31.085081Z node 12 :KQP_EXECUTER DEBUG: ActorId: [12:975:2780] TxId: 281474976715667. Ctx: { TraceId: 01jzmyk7d50wrn1cwf3042989s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=12&id=MWZhNjA5NS0yYzE4MGEwNy0xNjcyYWZjNy01MzMwYzMxYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: ExecuteState, got execution state from compute actor: [12:985:2797], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 296 DurationUs: 1000 Tasks { TaskId: 1 CpuTimeUs: 81 FinishTimeMs: 1751976091085 OutputRows: 3 OutputBytes: 12 Tables { TablePath: "/Root/table" ReadRows: 3 ReadBytes: 24 AffectedPartitions: 2 } IngressRows: 3 ComputeCpuTimeUs: 38 BuildCpuTimeUs: 43 WaitInputTimeUs: 329 HostName: "ghrun-3z2hjo4icm" NodeId: 12 StartTimeMs: 1751976091084 CreateTimeMs: 1751976091083 UpdateTimeMs: 1751976091085 } MaxMemoryUsage: 1048576 } 2025-07-08T12:01:31.085094Z node 12 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jzmyk7d50wrn1cwf3042989s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=12&id=MWZhNjA5NS0yYzE4MGEwNy0xNjcyYWZjNy01MzMwYzMxYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Compute actor has finished execution: [12:985:2797] 2025-07-08T12:01:31.085134Z node 12 :KQP_EXECUTER DEBUG: ActorId: [12:975:2780] TxId: 281474976715667. Ctx: { TraceId: 01jzmyk7d50wrn1cwf3042989s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=12&id=MWZhNjA5NS0yYzE4MGEwNy0xNjcyYWZjNy01MzMwYzMxYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. terminate execution. 2025-07-08T12:01:31.085140Z node 12 :KQP_EXECUTER TRACE: ActorId: [12:975:2780] TxId: 281474976715667. Ctx: { TraceId: 01jzmyk7d50wrn1cwf3042989s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=12&id=MWZhNjA5NS0yYzE4MGEwNy0xNjcyYWZjNy01MzMwYzMxYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Terminate, become ZombieState 2025-07-08T12:01:31.085147Z node 12 :KQP_EXECUTER DEBUG: ActorId: [12:975:2780] TxId: 281474976715667. Ctx: { TraceId: 01jzmyk7d50wrn1cwf3042989s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=12&id=MWZhNjA5NS0yYzE4MGEwNy0xNjcyYWZjNy01MzMwYzMxYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Resource usage for last stat interval: ComputeTime: 0.000460s ReadRows: 3 ReadBytes: 24 ru: 3 rate limiter was not found force flag: 1 2025-07-08T12:01:31.085166Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=MWZhNjA5NS0yYzE4MGEwNy0xNjcyYWZjNy01MzMwYzMxYg==, ActorId: [12:965:2780], ActorState: ExecuteState, TraceId: 01jzmyk7d50wrn1cwf3042989s, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-07-08T12:01:31.085217Z node 12 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=12&id=MWZhNjA5NS0yYzE4MGEwNy0xNjcyYWZjNy01MzMwYzMxYg==, ActorId: [12:965:2780], ActorState: ExecuteState, TraceId: 01jzmyk7d50wrn1cwf3042989s, txInfo Status: Committed Kind: ReadOnly TotalDuration: 24.694 ServerDuration: 24.669 QueriesCount: 2 2025-07-08T12:01:31.085255Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=MWZhNjA5NS0yYzE4MGEwNy0xNjcyYWZjNy01MzMwYzMxYg==, ActorId: [12:965:2780], ActorState: ExecuteState, TraceId: 01jzmyk7d50wrn1cwf3042989s, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-07-08T12:01:31.085353Z node 12 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=12&id=MWZhNjA5NS0yYzE4MGEwNy0xNjcyYWZjNy01MzMwYzMxYg==, ActorId: [12:965:2780], ActorState: ExecuteState, TraceId: 01jzmyk7d50wrn1cwf3042989s, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-07-08T12:01:31.085359Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=MWZhNjA5NS0yYzE4MGEwNy0xNjcyYWZjNy01MzMwYzMxYg==, ActorId: [12:965:2780], ActorState: ExecuteState, TraceId: 01jzmyk7d50wrn1cwf3042989s, EndCleanup, isFinal: 0 2025-07-08T12:01:31.085373Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=MWZhNjA5NS0yYzE4MGEwNy0xNjcyYWZjNy01MzMwYzMxYg==, ActorId: [12:965:2780], ActorState: ExecuteState, TraceId: 01jzmyk7d50wrn1cwf3042989s, Sent query response back to proxy, proxyRequestId: 11, proxyId: [12:58:2105] { items { uint32_value: 2 } items { uint32_value: 20 } }, { items { uint32_value: 5 } items { uint32_value: 50 } }, { items { uint32_value: 6 } items { uint32_value: 60 } } >> BsControllerTest::DecommitRejected ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldSucceedOnViewsAndTablesPermissions [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:137:2159] sender: [1:138:2058] recipient: [1:113:2144] 2025-07-08T12:01:26.697231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:01:26.697257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:01:26.697263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:01:26.697268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:01:26.697284Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:01:26.697288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:01:26.697298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:01:26.697311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:01:26.697386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:01:26.709816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T12:01:26.709841Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:26.711752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:01:26.711780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:01:26.711803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:01:26.712473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:01:26.712506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:01:26.712598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:26.712653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:01:26.713167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:26.713203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:01:26.713443Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:26.713451Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:26.713471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:01:26.713477Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:26.713483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:01:26.713511Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-07-08T12:01:26.714658Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:137:2159] sender: [1:241:2058] recipient: [1:15:2062] 2025-07-08T12:01:26.741946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:01:26.742035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 
1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:26.742099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:01:26.742160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:01:26.742170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:26.743340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:26.743372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:01:26.743426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:26.743435Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:01:26.743440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:01:26.743447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:01:26.743923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:26.743935Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:01:26.743940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:01:26.746048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:26.746065Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:26.746071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:26.746078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:01:26.746654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:01:26.747271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:01:26.747313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:01:26.747502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 
72057594046678944 2025-07-08T12:01:26.747527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:26.747535Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:26.747591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:01:26.747599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:26.747625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:01:26.747638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:01:26.748308Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:26.748317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:26.748360Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:26.748366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:01:26.748377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:26.748385Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:01:26.748398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:01:26.748402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:26.748407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:01:26.748410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:26.748414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:01:26.748419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:26.748422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-07-08T12:01:26.748426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-07-08T12:01:26.748439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:01:26.748444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-07-08T12:01:26.748448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-07-08T12:01:26.748866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 
1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-07-08T12:01:26.748886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, ms ... 16545 2025-07-08T12:01:31.305774Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2025-07-08T12:01:31.305799Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710759 at step: 5000007 2025-07-08T12:01:31.305931Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:31.305974Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 126 RawX2: 68719478888 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:31.305983Z node 16 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000007, at schemeshard: 72057594046678944 2025-07-08T12:01:31.306000Z node 16 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2025-07-08T12:01:31.306024Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 REQUEST: PUT /table/metadata.json HTTP/1.1 HEADERS: Host: localhost:14322 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: BC173644-63D2-4099-A2DE-59A428F89D43 amz-sdk-request: attempt=1 content-length: 73 content-md5: oBd372HtOJ3JW3N2b2gUVA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /table/metadata.json / / 73 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000007 2025-07-08T12:01:31.310332Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:31.310344Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2025-07-08T12:01:31.310398Z node 16 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:31.310404Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [16:206:2208], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 6 FAKE_COORDINATOR: Erasing txId 281474976710759 2025-07-08T12:01:31.310507Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-07-08T12:01:31.310517Z node 16 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:01:31.310723Z node 16 :FLAT_TX_SCHEMESHARD INFO: 
Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-07-08T12:01:31.310738Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-07-08T12:01:31.310742Z node 16 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2025-07-08T12:01:31.310747Z node 16 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2025-07-08T12:01:31.310753Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-07-08T12:01:31.310805Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true REQUEST: PUT /table/permissions.pb HTTP/1.1 HEADERS: Host: localhost:14322 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 60171A6B-85AC-4502-8937-761DECC7CF68 amz-sdk-request: attempt=1 content-length: 43 content-md5: JIqMFsQjXF0c+sG0y+coog== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /table/permissions.pb / / 43 2025-07-08T12:01:31.311758Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 TestWaitNotification wait txId: 1004 2025-07-08T12:01:31.311819Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-07-08T12:01:31.311825Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 REQUEST: PUT /table/scheme.pb HTTP/1.1 HEADERS: Host: localhost:14322 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: FB4AE0E5-A0BB-4784-A547-8F33E7D17D75 amz-sdk-request: attempt=1 content-length: 355 content-md5: 4DhJNWgTpoG3PVvZ0uCHUA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 2025-07-08T12:01:31.311885Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion export in-flight, txId: 1004, at schemeshard: 72057594046678944 S3_MOCK::HttpServeWrite: 2025-07-08T12:01:31.311891Z node 16 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1004, at schemeshard: 72057594046678944 /table/scheme.pb / / 355 REQUEST: PUT /table/data_00.csv HTTP/1.1 HEADERS: Host: localhost:14322 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: ACA374C7-E14E-45D9-ACDA-138A8E731A68 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-139-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /table/data_00.csv / / 0 2025-07-08T12:01:31.315254Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 492 RawX2: 68719479198 } Origin: 72075186233409547 State: 
2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-07-08T12:01:31.315268Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2025-07-08T12:01:31.315287Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 492 RawX2: 68719479198 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-07-08T12:01:31.315301Z node 16 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 492 RawX2: 68719479198 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-07-08T12:01:31.315312Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:31.315317Z node 16 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-07-08T12:01:31.315322Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-07-08T12:01:31.315328Z node 16 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-07-08T12:01:31.315360Z node 16 :FLAT_TX_SCHEMESHARD INFO: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:31.315754Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-07-08T12:01:31.315817Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-07-08T12:01:31.315824Z node 16 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-07-08T12:01:31.315837Z node 16 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-07-08T12:01:31.315841Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-07-08T12:01:31.315846Z node 16 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-07-08T12:01:31.315849Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-07-08T12:01:31.315853Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2025-07-08T12:01:31.315866Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [16:131:2156] message: TxId: 281474976710759 2025-07-08T12:01:31.315872Z node 16 :FLAT_TX_SCHEMESHARD 
DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-07-08T12:01:31.315877Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-07-08T12:01:31.315880Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2025-07-08T12:01:31.315904Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-07-08T12:01:31.316526Z node 16 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-07-08T12:01:31.316546Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2025-07-08T12:01:31.316554Z node 16 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-07-08T12:01:31.316558Z node 16 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710759 2025-07-08T12:01:31.316563Z node 16 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710759, id# 1004, itemIdx# 1 2025-07-08T12:01:31.317067Z node 16 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-07-08T12:01:31.317090Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-07-08T12:01:31.317107Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [16:634:2592] TestWaitNotification: OK eventTxId 1004 |69.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::DecommitRejected [GOOD] |69.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> SelfHealActorTest::SingleErrorDisk [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowPgNotNull+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 20732, MsgBus: 7457 2025-07-08T12:01:30.159419Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524680013404919667:2239];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:30.159559Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b62/r3tmp/tmpN7XH4w/pdisk_1.dat 2025-07-08T12:01:30.216643Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20732, node 1 2025-07-08T12:01:30.239159Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:30.239175Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:30.239176Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:30.239214Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7457 2025-07-08T12:01:30.258011Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:30.258040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:30.259155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7457 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:30.302497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:30.305027Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:30.362667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:30.389729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:30.411328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.430049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:30.582775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.642121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.654311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.663657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.677730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.693884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.705332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:30.852738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28882, MsgBus: 25067 2025-07-08T12:01:31.272565Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524680015504111113:2070];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:31.273928Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b62/r3tmp/tmpV7SWYD/pdisk_1.dat 2025-07-08T12:01:31.290636Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28882, node 2 2025-07-08T12:01:31.297194Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:31.297207Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:31.297209Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:31.297249Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25067 TClient is connected to server localhost:25067 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:31.378832Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:31.378863Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:31.379229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:31.380927Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:31.425055Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:31.447447Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:31.464162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:31.476156Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:31.597331Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:31.606608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:31.617485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:31.630542Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:31.684786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:31.692518Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:31.708582Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:31.860538Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 |69.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::SelfHealBlock4Plus2 [GOOD] >> BsControllerTest::SelfHealMirror3dc >> BsControllerTest::TestLocalSelfHeal >> KqpImmediateEffects::Upsert >> KqpInplaceUpdate::SingleRowArithm+UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::DecommitRejected [GOOD] Test command err: 2025-07-08T12:01:32.499239Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2025-07-08T12:01:32.499253Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2025-07-08T12:01:32.499272Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2025-07-08T12:01:32.499275Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2025-07-08T12:01:32.499280Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2025-07-08T12:01:32.499283Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2025-07-08T12:01:32.499287Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2025-07-08T12:01:32.499290Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2025-07-08T12:01:32.499294Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2025-07-08T12:01:32.499297Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2025-07-08T12:01:32.499300Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2025-07-08T12:01:32.499303Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2025-07-08T12:01:32.499307Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2025-07-08T12:01:32.499310Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2025-07-08T12:01:32.499314Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2025-07-08T12:01:32.499317Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2025-07-08T12:01:32.499320Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] 
Bootstrap 2025-07-08T12:01:32.499323Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2025-07-08T12:01:32.499326Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2025-07-08T12:01:32.499329Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2025-07-08T12:01:32.499332Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2025-07-08T12:01:32.499335Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2025-07-08T12:01:32.499338Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2025-07-08T12:01:32.499341Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2025-07-08T12:01:32.499344Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2025-07-08T12:01:32.499346Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2025-07-08T12:01:32.499350Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2025-07-08T12:01:32.499353Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2025-07-08T12:01:32.499356Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2025-07-08T12:01:32.499359Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2025-07-08T12:01:32.501306Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:508:32] Status# ERROR ClientId# [1:508:32] ServerId# [0:0:0] PipeClient# [1:508:32] 2025-07-08T12:01:32.501416Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:509:20] Status# ERROR ClientId# [2:509:20] ServerId# [0:0:0] PipeClient# [2:509:20] 2025-07-08T12:01:32.501425Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:510:20] Status# ERROR ClientId# [3:510:20] ServerId# [0:0:0] PipeClient# [3:510:20] 2025-07-08T12:01:32.501432Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:511:20] Status# ERROR ClientId# [4:511:20] ServerId# [0:0:0] PipeClient# [4:511:20] 2025-07-08T12:01:32.501438Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:512:20] Status# ERROR ClientId# [5:512:20] ServerId# [0:0:0] PipeClient# [5:512:20] 2025-07-08T12:01:32.501444Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:513:20] Status# ERROR ClientId# [6:513:20] ServerId# [0:0:0] PipeClient# [6:513:20] 2025-07-08T12:01:32.501451Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:514:20] Status# ERROR ClientId# [7:514:20] ServerId# [0:0:0] PipeClient# [7:514:20] 2025-07-08T12:01:32.501457Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:515:20] Status# ERROR ClientId# [8:515:20] ServerId# [0:0:0] PipeClient# [8:515:20] 2025-07-08T12:01:32.501463Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:516:20] Status# ERROR ClientId# [9:516:20] ServerId# [0:0:0] PipeClient# [9:516:20] 2025-07-08T12:01:32.501470Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:517:20] Status# ERROR ClientId# [10:517:20] ServerId# [0:0:0] PipeClient# [10:517:20] 2025-07-08T12:01:32.501479Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:518:20] Status# ERROR ClientId# [11:518:20] ServerId# [0:0:0] PipeClient# [11:518:20] 2025-07-08T12:01:32.501485Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:519:20] Status# ERROR ClientId# [12:519:20] ServerId# [0:0:0] PipeClient# [12:519:20] 2025-07-08T12:01:32.501491Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:520:20] Status# ERROR ClientId# [13:520:20] ServerId# [0:0:0] PipeClient# [13:520:20] 2025-07-08T12:01:32.501496Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:521:20] Status# ERROR ClientId# [14:521:20] ServerId# [0:0:0] PipeClient# [14:521:20] 
2025-07-08T12:01:32.501502Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:522:20] Status# ERROR ClientId# [15:522:20] ServerId# [0:0:0] PipeClient# [15:522:20] 2025-07-08T12:01:32.508619Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] Connect 2025-07-08T12:01:32.508637Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] Connect 2025-07-08T12:01:32.508645Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] Connect 2025-07-08T12:01:32.508654Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] Connect 2025-07-08T12:01:32.508660Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] Connect 2025-07-08T12:01:32.508668Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] Connect 2025-07-08T12:01:32.508676Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] Connect 2025-07-08T12:01:32.508684Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] Connect 2025-07-08T12:01:32.508692Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] Connect 2025-07-08T12:01:32.508701Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] Connect 2025-07-08T12:01:32.508708Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] Connect 2025-07-08T12:01:32.508715Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] Connect 2025-07-08T12:01:32.508723Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] Connect 2025-07-08T12:01:32.508739Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] Connect 2025-07-08T12:01:32.508746Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] Connect 2025-07-08T12:01:32.509102Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:581:60] Status# OK ClientId# [1:581:60] ServerId# [1:610:61] PipeClient# [1:581:60] 2025-07-08T12:01:32.509110Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] State switched from 0 to 1 2025-07-08T12:01:32.509576Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:582:21] Status# OK ClientId# [2:582:21] ServerId# [1:611:62] PipeClient# [2:582:21] 2025-07-08T12:01:32.509583Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] State switched from 0 to 1 2025-07-08T12:01:32.509589Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:583:21] Status# OK ClientId# [3:583:21] ServerId# [1:612:63] PipeClient# [3:583:21] 2025-07-08T12:01:32.509593Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] State switched from 0 to 1 2025-07-08T12:01:32.509598Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:584:21] Status# OK ClientId# [4:584:21] ServerId# [1:613:64] PipeClient# [4:584:21] 2025-07-08T12:01:32.509602Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] State switched from 0 to 1 2025-07-08T12:01:32.509607Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:585:21] Status# OK ClientId# [5:585:21] ServerId# [1:614:65] PipeClient# [5:585:21] 2025-07-08T12:01:32.509610Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] State switched from 0 to 1 2025-07-08T12:01:32.509615Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:586:21] Status# OK ClientId# [6:586:21] ServerId# [1:615:66] PipeClient# [6:586:21] 2025-07-08T12:01:32.509619Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] State switched from 0 to 1 2025-07-08T12:01:32.509624Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:587:21] Status# OK ClientId# [7:587:21] ServerId# [1:616:67] PipeClient# [7:587:21] 2025-07-08T12:01:32.509627Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] State switched from 0 to 1 2025-07-08T12:01:32.509634Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:588:21] Status# OK ClientId# [8:588:21] ServerId# [1:617:68] PipeClient# [8:588:21] 2025-07-08T12:01:32.509638Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] State switched from 0 to 1 2025-07-08T12:01:32.509643Z 9 00h00m00.100000s :BS_NODE 
DEBUG: [9] ClientConnected Sender# [9:589:21] Status# OK ClientId# [9:589:21] ServerId# [1:618:69] PipeClient# [9:589:21] 2025-07-08T12:01:32.509646Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] State switched from 0 to 1 2025-07-08T12:01:32.509651Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:590:21] Status# OK ClientId# [10:590:21] ServerId# [1:619:70] PipeClient# [10:590:21] 2025-07-08T12:01:32.509655Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] State switched from 0 to 1 2025-07-08T12:01:32.509660Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:591:21] Status# OK ClientId# [11:591:21] ServerId# [1:620:71] PipeClient# [11:591:21] 2025-07-08T12:01:32.509663Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] State switched from 0 to 1 2025-07-08T12:01:32.509668Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:592:21] Status# OK ClientId# [12:592:21] ServerId# [1:621:72] PipeClient# [12:592:21] 2025-07-08T12:01:32.509671Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] State switched from 0 to 1 2025-07-08T12:01:32.509677Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:593:21] Status# OK ClientId# [13:593:21] ServerId# [1:622:73] PipeClient# [13:593:21] 2025-07-08T12:01:32.509680Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] State switched from 0 to 1 2025-07-08T12:01:32.509685Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:594:21] Status# OK ClientId# [14:594:21] ServerId# [1:623:74] PipeClient# [14:594:21] 2025-07-08T12:01:32.509689Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] State switched from 0 to 1 2025-07-08T12:01:32.509693Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:595:21] Status# OK ClientId# [15:595:21] ServerId# [1:624:75] PipeClient# [15:595:21] 2025-07-08T12:01:32.509697Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] State switched from 0 to 1 2025-07-08T12:01:32.510046Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-07-08T12:01:32.510057Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] VDiskId# [80000000:1:0:0:0] PDiskId# 1000 VSlotId# 1000 created 2025-07-08T12:01:32.513003Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] VDiskId# [80000000:1:0:0:0] status changed to INIT_PENDING 2025-07-08T12:01:32.513258Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2025-07-08T12:01:32.513271Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] VDiskId# [80000000:1:0:1:0] PDiskId# 1000 VSlotId# 1000 created 2025-07-08T12:01:32.513284Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] VDiskId# [80000000:1:0:1:0] status changed to INIT_PENDING 2025-07-08T12:01:32.513299Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2025-07-08T12:01:32.513305Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] VDiskId# [80000000:1:0:2:0] PDiskId# 1000 VSlotId# 1000 created 2025-07-08T12:01:32.513312Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] VDiskId# [80000000:1:0:2:0] status changed to INIT_PENDING 2025-07-08T12:01:32.513325Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2025-07-08T12:01:32.513330Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] VDiskId# [80000000:1:1:0:0] PDiskId# 1000 VSlotId# 1000 created 2025-07-08T12:01:32.513337Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] VDiskId# [80000000:1:1:0:0] status changed to INIT_PENDING 2025-07-08T12:01:32.513349Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2025-07-08T12:01:32.513355Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] VDiskId# [80000000:1:1:1:0] PDiskId# 1000 VSlotId# 1000 created 2025-07-08T12:01:32.513361Z 5 00h00m00.100512s :BS_NODE DEBUG: 
[5] VDiskId# [80000000:1:1:1:0] status changed to INIT_PENDING 2025-07-08T1 ... },{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2025-07-08T12:01:32.590753Z 1 00h01m10.000000s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:708} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2025-07-08T12:01:32.590797Z 10 00h01m12.420512s :BS_NODE DEBUG: [10] VDiskId# [80000001:1:0:0:0] status changed to READY 2025-07-08T12:01:32.590836Z 1 00h01m12.420512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:708} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2025-07-08T12:01:32.590895Z 12 00h01m15.395512s :BS_NODE DEBUG: [12] VDiskId# [80000001:1:0:2:0] status changed to READY 2025-07-08T12:01:32.590931Z 1 00h01m15.395512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:708} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2025-07-08T12:01:32.590951Z 1 00h01m15.622512s :BS_NODE DEBUG: [1] VDiskId# [80000001:1:2:0:0] status changed to READY 2025-07-08T12:01:32.590980Z 1 00h01m15.622512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:708} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2025-07-08T12:01:32.590998Z 11 00h01m15.855512s :BS_NODE DEBUG: [11] VDiskId# [80000001:1:0:1:0] status changed to READY 2025-07-08T12:01:32.591034Z 1 00h01m15.855512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:708} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2025-07-08T12:01:32.591054Z 14 00h01m16.984536s :BS_NODE DEBUG: [14] VDiskId# [80000000:3:2:1:0] status changed to READY 2025-07-08T12:01:32.591102Z 1 00h01m16.984536s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:708} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2025-07-08T12:01:32.591211Z 8 00h01m16.985048s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2025-07-08T12:01:32.591218Z 8 00h01m16.985048s :BS_NODE DEBUG: [8] VDiskId# [80000000:2:2:1:0] destroyed 2025-07-08T12:01:32.591245Z 15 00h01m19.183512s :BS_NODE DEBUG: 
[15] VDiskId# [80000001:1:1:2:0] status changed to READY 2025-07-08T12:01:32.591283Z 1 00h01m19.183512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:708} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2025-07-08T12:01:32.591304Z 14 00h01m19.329512s :BS_NODE DEBUG: [14] VDiskId# [80000001:1:1:1:0] status changed to READY 2025-07-08T12:01:32.591348Z 1 00h01m19.329512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:708} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2025-07-08T12:01:32.591375Z 1 00h01m20.000000s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:708} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2025-07-08T12:01:32.591397Z 2 00h01m20.303512s :BS_NODE DEBUG: [2] VDiskId# [80000001:1:2:1:0] status changed to READY 2025-07-08T12:01:32.591452Z 1 00h01m20.303512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:708} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready Faulty Decommitted}] GroupId# 2147483648 2025-07-08T12:01:32.591477Z 13 00h01m21.052024s :BS_NODE DEBUG: [13] VDiskId# [80000000:3:2:0:0] status changed to READY 2025-07-08T12:01:32.591533Z 1 00h01m21.052024s :BS_SELFHEAL DEBUG: {BSSH01@self_heal.cpp:71} Reassigner starting GroupId# 2147483648 2025-07-08T12:01:32.591629Z 1 00h01m21.052024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:32.591636Z 1 00h01m21.052024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:0:0] DiskIsOk# true 2025-07-08T12:01:32.591671Z 1 00h01m21.052024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:32.591676Z 1 00h01m21.052024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:1:0] DiskIsOk# true 2025-07-08T12:01:32.591681Z 1 00h01m21.052024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:32.591685Z 1 00h01m21.052024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:2:0] DiskIsOk# true 2025-07-08T12:01:32.591693Z 1 00h01m21.052024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 
2025-07-08T12:01:32.591697Z 1 00h01m21.052024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:0:0] DiskIsOk# true 2025-07-08T12:01:32.591703Z 1 00h01m21.052024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:32.591707Z 1 00h01m21.052024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:1:0] DiskIsOk# true 2025-07-08T12:01:32.591712Z 1 00h01m21.052024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:32.591716Z 1 00h01m21.052024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:2:0] DiskIsOk# true 2025-07-08T12:01:32.591720Z 1 00h01m21.052024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:32.591725Z 1 00h01m21.052024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:2:0:0] DiskIsOk# true 2025-07-08T12:01:32.591730Z 1 00h01m21.052024s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:32.591735Z 1 00h01m21.052024s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:2:1:0] DiskIsOk# true 2025-07-08T12:01:32.592074Z 1 00h01m21.052536s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-07-08T12:01:32.592084Z 1 00h01m21.052536s :BS_NODE DEBUG: [1] VDiskId# [80000000:3:0:0:0] -> [80000000:4:0:0:0] 2025-07-08T12:01:32.592172Z 1 00h01m21.052536s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:206} Reassigner succeeded GroupId# 2147483648 Items# [80000000:3:2:2:0]: 9:1000:1000 -> 15:1000:1001 ConfigTxSeqNo# 23 2025-07-08T12:01:32.592178Z 1 00h01m21.052536s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:217} Reassigner finished GroupId# 2147483648 Success# true 2025-07-08T12:01:32.592193Z 7 00h01m21.052536s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2025-07-08T12:01:32.592199Z 7 00h01m21.052536s :BS_NODE DEBUG: [7] VDiskId# [80000000:1:2:0:0] destroyed 2025-07-08T12:01:32.592211Z 2 00h01m21.052536s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2025-07-08T12:01:32.592217Z 2 00h01m21.052536s :BS_NODE DEBUG: [2] VDiskId# [80000000:3:0:1:0] -> [80000000:4:0:1:0] 2025-07-08T12:01:32.592226Z 3 00h01m21.052536s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2025-07-08T12:01:32.592231Z 3 00h01m21.052536s :BS_NODE DEBUG: [3] VDiskId# [80000000:3:0:2:0] -> [80000000:4:0:2:0] 2025-07-08T12:01:32.592240Z 4 00h01m21.052536s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2025-07-08T12:01:32.592245Z 4 00h01m21.052536s :BS_NODE DEBUG: [4] VDiskId# [80000000:3:1:0:0] -> [80000000:4:1:0:0] 2025-07-08T12:01:32.592254Z 5 00h01m21.052536s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2025-07-08T12:01:32.592259Z 5 00h01m21.052536s :BS_NODE DEBUG: [5] VDiskId# [80000000:3:1:1:0] -> [80000000:4:1:1:0] 2025-07-08T12:01:32.592267Z 6 00h01m21.052536s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2025-07-08T12:01:32.592272Z 6 00h01m21.052536s :BS_NODE DEBUG: [6] VDiskId# [80000000:3:1:2:0] -> [80000000:4:1:2:0] 2025-07-08T12:01:32.592279Z 9 00h01m21.052536s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 
2025-07-08T12:01:32.592288Z 13 00h01m21.052536s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2025-07-08T12:01:32.592294Z 13 00h01m21.052536s :BS_NODE DEBUG: [13] VDiskId# [80000000:3:2:0:0] -> [80000000:4:2:0:0] 2025-07-08T12:01:32.592305Z 14 00h01m21.052536s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-07-08T12:01:32.592311Z 14 00h01m21.052536s :BS_NODE DEBUG: [14] VDiskId# [80000000:3:2:1:0] -> [80000000:4:2:1:0] 2025-07-08T12:01:32.592320Z 15 00h01m21.052536s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2025-07-08T12:01:32.592324Z 15 00h01m21.052536s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] PDiskId# 1000 VSlotId# 1001 created 2025-07-08T12:01:32.592333Z 15 00h01m21.052536s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to INIT_PENDING 2025-07-08T12:01:32.592483Z 13 00h01m21.872512s :BS_NODE DEBUG: [13] VDiskId# [80000001:1:1:0:0] status changed to READY 2025-07-08T12:01:32.592585Z 15 00h01m25.779536s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to REPLICATING 2025-07-08T12:01:32.592633Z 3 00h01m26.443512s :BS_NODE DEBUG: [3] VDiskId# [80000001:1:2:2:0] status changed to READY 2025-07-08T12:01:32.592902Z 15 00h01m32.339536s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to READY 2025-07-08T12:01:32.593045Z 9 00h01m32.340048s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2025-07-08T12:01:32.593054Z 9 00h01m32.340048s :BS_NODE DEBUG: [9] VDiskId# [80000000:3:2:2:0] destroyed >> KqpEffects::InsertAbort_Select_Duplicates+UseSink >> KqpImmediateEffects::UpsertExistingKey >> IndexBuildTestReboots::BaseCaseWithDataColumns ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::SelfHealBlock4Plus2 [GOOD] Test command err: 2025-07-08T12:01:31.200354Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2025-07-08T12:01:31.200373Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2025-07-08T12:01:31.200390Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2025-07-08T12:01:31.200394Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2025-07-08T12:01:31.200400Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2025-07-08T12:01:31.200404Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2025-07-08T12:01:31.200409Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2025-07-08T12:01:31.200413Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2025-07-08T12:01:31.200418Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2025-07-08T12:01:31.200421Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2025-07-08T12:01:31.200429Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2025-07-08T12:01:31.200433Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2025-07-08T12:01:31.200438Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2025-07-08T12:01:31.200442Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2025-07-08T12:01:31.200447Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2025-07-08T12:01:31.200451Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2025-07-08T12:01:31.200463Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2025-07-08T12:01:31.200466Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2025-07-08T12:01:31.200473Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2025-07-08T12:01:31.200477Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2025-07-08T12:01:31.200482Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2025-07-08T12:01:31.200486Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2025-07-08T12:01:31.200492Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2025-07-08T12:01:31.200497Z 12 
00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2025-07-08T12:01:31.200502Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2025-07-08T12:01:31.200506Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2025-07-08T12:01:31.200512Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2025-07-08T12:01:31.200516Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2025-07-08T12:01:31.200525Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2025-07-08T12:01:31.200529Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2025-07-08T12:01:31.200534Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2025-07-08T12:01:31.200537Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2025-07-08T12:01:31.200543Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2025-07-08T12:01:31.200546Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2025-07-08T12:01:31.200551Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2025-07-08T12:01:31.200555Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2025-07-08T12:01:31.200561Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2025-07-08T12:01:31.200565Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2025-07-08T12:01:31.200571Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2025-07-08T12:01:31.200575Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2025-07-08T12:01:31.200580Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2025-07-08T12:01:31.200584Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2025-07-08T12:01:31.200589Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2025-07-08T12:01:31.200593Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2025-07-08T12:01:31.200598Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2025-07-08T12:01:31.200602Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2025-07-08T12:01:31.200608Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2025-07-08T12:01:31.200612Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2025-07-08T12:01:31.200619Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2025-07-08T12:01:31.200623Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2025-07-08T12:01:31.200628Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2025-07-08T12:01:31.200632Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2025-07-08T12:01:31.200637Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2025-07-08T12:01:31.200643Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2025-07-08T12:01:31.200648Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2025-07-08T12:01:31.200651Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2025-07-08T12:01:31.200655Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2025-07-08T12:01:31.200657Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2025-07-08T12:01:31.200660Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2025-07-08T12:01:31.200662Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2025-07-08T12:01:31.200666Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2025-07-08T12:01:31.200668Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2025-07-08T12:01:31.200674Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2025-07-08T12:01:31.200676Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2025-07-08T12:01:31.202911Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2157:49] Status# ERROR ClientId# [1:2157:49] ServerId# [0:0:0] PipeClient# [1:2157:49] 2025-07-08T12:01:31.203077Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2158:37] Status# ERROR ClientId# [2:2158:37] ServerId# [0:0:0] PipeClient# [2:2158:37] 
2025-07-08T12:01:31.203082Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2159:37] Status# ERROR ClientId# [3:2159:37] ServerId# [0:0:0] PipeClient# [3:2159:37] 2025-07-08T12:01:31.203086Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2160:37] Status# ERROR ClientId# [4:2160:37] ServerId# [0:0:0] PipeClient# [4:2160:37] 2025-07-08T12:01:31.203091Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2161:37] Status# ERROR ClientId# [5:2161:37] ServerId# [0:0:0] PipeClient# [5:2161:37] 2025-07-08T12:01:31.203097Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2162:37] Status# ERROR ClientId# [6:2162:37] ServerId# [0:0:0] PipeClient# [6:2162:37] 2025-07-08T12:01:31.203102Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2163:37] Status# ERROR ClientId# [7:2163:37] ServerId# [0:0:0] PipeClient# [7:2163:37] 2025-07-08T12:01:31.203108Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2164:37] Status# ERROR ClientId# [8:2164:37] ServerId# [0:0:0] PipeClient# [8:2164:37] 2025-07-08T12:01:31.203114Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2165:37] Status# ERROR ClientId# [9:2165:37] ServerId# [0:0:0] PipeClient# [9:2165:37] 2025-07-08T12:01:31.203119Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2166:37] Status# ERROR ClientId# [10:2166:37] ServerId# [0:0:0] PipeClient# [10:2166:37] 2025-07-08T12:01:31.203125Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2167:37] Status# ERROR ClientId# [11:2167:37] ServerId# [0:0:0] PipeClient# [11:2167:37] 2025-07-08T12:01:31.203131Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2168:37] Status# ERROR ClientId# [12:2168:37] ServerId# [0:0:0] PipeClient# [12:2168:37] 2025-07-08T12:01:31.203137Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2169:37] Status# ERROR ClientId# [13:2169:37] ServerId# [0:0:0] PipeClient# [13:2169:37] 2025-07-08T12:01:31.203143Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2170:37] Status# ERROR ClientId# [14:2170:37] ServerId# [0:0:0] PipeClient# [14:2170:37] 2025-07-08T12:01:31.203149Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2171:37] Status# ERROR ClientId# [15:2171:37] ServerId# [0:0:0] PipeClient# [15:2171:37] 2025-07-08T12:01:31.203155Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2172:37] Status# ERROR ClientId# [16:2172:37] ServerId# [0:0:0] PipeClient# [16:2172:37] 2025-07-08T12:01:31.203161Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2173:37] Status# ERROR ClientId# [17:2173:37] ServerId# [0:0:0] PipeClient# [17:2173:37] 2025-07-08T12:01:31.203167Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2174:37] Status# ERROR ClientId# [18:2174:37] ServerId# [0:0:0] PipeClient# [18:2174:37] 2025-07-08T12:01:31.203175Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2175:37] Status# ERROR ClientId# [19:2175:37] ServerId# [0:0:0] PipeClient# [19:2175:37] 2025-07-08T12:01:31.203181Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2176:37] Status# ERROR ClientId# [20:2176:37] ServerId# [0:0:0] PipeClient# [20:2176:37] 2025-07-08T12:01:31.203187Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2177:37] Status# ERROR ClientId# [21:2177:37] ServerId# [0:0:0] PipeClient# [21:2177:37] 2025-07-08T12:01:31.203194Z 22 00h00m00.000000s 
:BS_NODE DEBUG: [22] ClientConnected Sender# [22:2178:37] Status# ERROR ClientId# [22:2178:37] ServerId# [0:0:0] PipeClient# [22:2178:37] 2025-07-08T12:01:31.203199Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2179:37] Status# ERROR ClientId# [23:2179:37] ServerId# [0:0:0] PipeClient# [23:2179:37] 2025-07-08T12:01:31.203206Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2180:37] Status# ERROR ClientId# [24:2180:37] ServerId# [0:0:0] PipeClient# [24:2180:37] 2025-07-08T12:01:31.203212Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2181:37] Status# ERROR ClientId# [25:2181:37] ServerId# [0:0:0] PipeClient# [25:2181:37] 2025-07-08T12:01:31.203218Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2182:37] Status# ERROR ClientId# [26:2182:37] ServerId# [0:0:0] PipeClient# [26:2182:37] 2025-07-08T12:01:31.203223Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2183:37] Status# ERROR ClientId# [27:2183:37] ServerId# [0:0:0] PipeClient# [27:2183:37] 2025-07-08T12:01:31.203229Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2184:37] Status# ERROR ClientId# [28:2184:37] ServerId# [0:0:0] PipeClient# [28:2184:37] 2025-07-08T12:01:31.203235Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2185:37] Status# ERROR ClientId# [29:2185:37] ServerId# [0:0:0] PipeClient# [29:2185:37] 2025-07-08T12:01:31.203241Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2186:37] Status# ERROR ClientId# [30:2186:37] ServerId# [0:0:0] PipeClient# [30:2186:37] 2025-07-08T12:01:31.203247Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2187:37] Status# ERROR ClientId# [31:2187:37] ServerId# [0:0:0] PipeClient# [31:2187:37] 2025-07-08T12:01:31.203254Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2188:37] Status# ERROR ClientId# [32:2188:37] ServerId# [0:0:0] PipeClient# [32:2188:37] 2025-07-08T12:01:31.228330Z 1 00h00m00.002560s :BS_NODE DEBUG: [1] CheckState from [1:2257:73] expected 1 current 0 2025-07-08T12:01:31.228354Z 2 00h00m00.002560s :BS_NODE DEBUG: [2] CheckState from [2:2258:38] expected 1 current 0 2025-07-08T12:01:31.228361Z 3 00h00m00.002560s :BS_NODE DEBUG: [3] CheckState from [3:2259:38] expected 1 current 0 2025-07-08T12:01:31.228368Z 4 00h00m00.002560s :BS_NODE DEBUG: [4] CheckState from [4:2260:38] expected 1 current 0 2025-07-08T12:01:31.228375Z 5 00h00m00.002560s :BS_NODE DEBUG: [5] CheckState from [5:2261:38] expected 1 current 0 2025-07-08T12:01:31.228380Z 6 00h00m00.002560s :BS_NODE DEBUG: [6] CheckState from [6:2262:38] expected 1 current 0 2025-07-08T12:01:31.228386Z 7 00h00m00.002560s :BS_NODE DEBUG: [7] CheckState from [7:2263:38] expected 1 current 0 2025-07-08T12:01:31.228393Z 8 00h00m00.002560s :BS_NODE DEBUG: [8] CheckState from [8:2264:38] expected 1 current 0 2025-07-08T12:01:31.228399Z 9 00h00m00.002560s :BS_NODE DEBUG: [9] CheckState from [9:2265:38] expected 1 current 0 2025-07-08T12:01:31.228405Z 10 00h00m00.002560s :BS_NODE DEBUG: [10] CheckState from [10:2266 ... 
ProcessVDiskReply GroupId# 2147483688 VDiskId# [80000028:1:0:5:0] DiskIsOk# true 2025-07-08T12:01:32.590301Z 1 04h55m00.119968s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483688 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:32.590306Z 1 04h55m00.119968s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483688 VDiskId# [80000028:1:0:6:0] DiskIsOk# true 2025-07-08T12:01:32.590314Z 1 04h55m00.119968s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483688 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:32.590319Z 1 04h55m00.119968s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483688 VDiskId# [80000028:1:0:7:0] DiskIsOk# true 2025-07-08T12:01:32.592113Z 1 04h55m00.120480s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-07-08T12:01:32.592127Z 1 04h55m00.120480s :BS_NODE DEBUG: [1] VDiskId# [80000028:1:0:0:0] -> [80000028:2:0:0:0] 2025-07-08T12:01:32.592225Z 1 04h55m00.120480s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:206} Reassigner succeeded GroupId# 2147483688 Items# [80000028:1:0:1:0]: 2:1000:1005 -> 22:1000:1013 ConfigTxSeqNo# 481 2025-07-08T12:01:32.592231Z 1 04h55m00.120480s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:217} Reassigner finished GroupId# 2147483688 Success# true 2025-07-08T12:01:32.592251Z 2 04h55m00.120480s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2025-07-08T12:01:32.592264Z 3 04h55m00.120480s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2025-07-08T12:01:32.592271Z 3 04h55m00.120480s :BS_NODE DEBUG: [3] VDiskId# [80000028:1:0:2:0] -> [80000028:2:0:2:0] 2025-07-08T12:01:32.592281Z 4 04h55m00.120480s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2025-07-08T12:01:32.592288Z 4 04h55m00.120480s :BS_NODE DEBUG: [4] VDiskId# [80000028:1:0:3:0] -> [80000028:2:0:3:0] 2025-07-08T12:01:32.592298Z 22 04h55m00.120480s :BS_NODE DEBUG: [22] NodeServiceSetUpdate 2025-07-08T12:01:32.592306Z 22 04h55m00.120480s :BS_NODE DEBUG: [22] VDiskId# [80000028:2:0:1:0] PDiskId# 1000 VSlotId# 1013 created 2025-07-08T12:01:32.592315Z 22 04h55m00.120480s :BS_NODE DEBUG: [22] VDiskId# [80000028:2:0:1:0] status changed to INIT_PENDING 2025-07-08T12:01:32.592328Z 5 04h55m00.120480s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2025-07-08T12:01:32.592334Z 5 04h55m00.120480s :BS_NODE DEBUG: [5] VDiskId# [80000028:1:0:4:0] -> [80000028:2:0:4:0] 2025-07-08T12:01:32.592344Z 6 04h55m00.120480s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2025-07-08T12:01:32.592351Z 6 04h55m00.120480s :BS_NODE DEBUG: [6] VDiskId# [80000028:1:0:5:0] -> [80000028:2:0:5:0] 2025-07-08T12:01:32.592362Z 7 04h55m00.120480s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2025-07-08T12:01:32.592368Z 7 04h55m00.120480s :BS_NODE DEBUG: [7] VDiskId# [80000028:1:0:6:0] -> [80000028:2:0:6:0] 2025-07-08T12:01:32.592377Z 8 04h55m00.120480s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2025-07-08T12:01:32.592384Z 8 04h55m00.120480s :BS_NODE DEBUG: [8] VDiskId# [80000028:1:0:7:0] -> [80000028:2:0:7:0] 2025-07-08T12:01:32.592426Z 1 04h55m00.120480s :BS_SELFHEAL DEBUG: {BSSH01@self_heal.cpp:71} Reassigner starting GroupId# 2147483672 2025-07-08T12:01:32.592521Z 1 04h55m00.120480s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483672 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:32.592527Z 1 04h55m00.120480s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483672 VDiskId# [80000018:1:0:0:0] 
DiskIsOk# true 2025-07-08T12:01:32.592573Z 1 04h55m00.120480s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483672 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:32.592578Z 1 04h55m00.120480s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483672 VDiskId# [80000018:1:0:2:0] DiskIsOk# true 2025-07-08T12:01:32.592586Z 1 04h55m00.120480s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483672 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:32.592591Z 1 04h55m00.120480s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483672 VDiskId# [80000018:1:0:3:0] DiskIsOk# true 2025-07-08T12:01:32.592596Z 1 04h55m00.120480s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483672 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:32.592601Z 1 04h55m00.120480s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483672 VDiskId# [80000018:1:0:4:0] DiskIsOk# true 2025-07-08T12:01:32.592606Z 1 04h55m00.120480s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483672 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:32.592611Z 1 04h55m00.120480s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483672 VDiskId# [80000018:1:0:5:0] DiskIsOk# true 2025-07-08T12:01:32.592616Z 1 04h55m00.120480s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483672 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:32.592621Z 1 04h55m00.120480s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483672 VDiskId# [80000018:1:0:6:0] DiskIsOk# true 2025-07-08T12:01:32.592626Z 1 04h55m00.120480s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483672 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:32.592630Z 1 04h55m00.120480s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483672 VDiskId# [80000018:1:0:7:0] DiskIsOk# true 2025-07-08T12:01:32.594474Z 1 04h55m00.120992s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-07-08T12:01:32.594489Z 1 04h55m00.120992s :BS_NODE DEBUG: [1] VDiskId# [80000018:1:0:0:0] -> [80000018:2:0:0:0] 2025-07-08T12:01:32.594564Z 1 04h55m00.120992s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:206} Reassigner succeeded GroupId# 2147483672 Items# [80000018:1:0:1:0]: 2:1000:1003 -> 18:1000:1013 ConfigTxSeqNo# 482 2025-07-08T12:01:32.594570Z 1 04h55m00.120992s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:217} Reassigner finished GroupId# 2147483672 Success# true 2025-07-08T12:01:32.594590Z 18 04h55m00.120992s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2025-07-08T12:01:32.594596Z 18 04h55m00.120992s :BS_NODE DEBUG: [18] VDiskId# [80000018:2:0:1:0] PDiskId# 1000 VSlotId# 1013 created 2025-07-08T12:01:32.594604Z 18 04h55m00.120992s :BS_NODE DEBUG: [18] VDiskId# [80000018:2:0:1:0] status changed to INIT_PENDING 2025-07-08T12:01:32.594616Z 2 04h55m00.120992s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2025-07-08T12:01:32.594625Z 3 04h55m00.120992s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2025-07-08T12:01:32.594631Z 3 04h55m00.120992s :BS_NODE DEBUG: [3] VDiskId# [80000018:1:0:2:0] -> [80000018:2:0:2:0] 2025-07-08T12:01:32.594639Z 4 04h55m00.120992s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 
2025-07-08T12:01:32.594644Z 4 04h55m00.120992s :BS_NODE DEBUG: [4] VDiskId# [80000018:1:0:3:0] -> [80000018:2:0:3:0] 2025-07-08T12:01:32.594653Z 5 04h55m00.120992s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2025-07-08T12:01:32.594657Z 5 04h55m00.120992s :BS_NODE DEBUG: [5] VDiskId# [80000018:1:0:4:0] -> [80000018:2:0:4:0] 2025-07-08T12:01:32.594665Z 6 04h55m00.120992s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2025-07-08T12:01:32.594671Z 6 04h55m00.120992s :BS_NODE DEBUG: [6] VDiskId# [80000018:1:0:5:0] -> [80000018:2:0:5:0] 2025-07-08T12:01:32.594679Z 7 04h55m00.120992s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2025-07-08T12:01:32.594684Z 7 04h55m00.120992s :BS_NODE DEBUG: [7] VDiskId# [80000018:1:0:6:0] -> [80000018:2:0:6:0] 2025-07-08T12:01:32.594692Z 8 04h55m00.120992s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2025-07-08T12:01:32.594701Z 8 04h55m00.120992s :BS_NODE DEBUG: [8] VDiskId# [80000018:1:0:7:0] -> [80000018:2:0:7:0] 2025-07-08T12:01:32.594844Z 22 04h55m02.358456s :BS_NODE DEBUG: [22] VDiskId# [80000000:2:0:1:0] status changed to REPLICATING 2025-07-08T12:01:32.594900Z 18 04h55m02.377968s :BS_NODE DEBUG: [18] VDiskId# [80000038:2:0:1:0] status changed to REPLICATING 2025-07-08T12:01:32.594956Z 18 04h55m02.518992s :BS_NODE DEBUG: [18] VDiskId# [80000018:2:0:1:0] status changed to REPLICATING 2025-07-08T12:01:32.595002Z 22 04h55m02.952920s :BS_NODE DEBUG: [22] VDiskId# [80000030:2:0:1:0] status changed to REPLICATING 2025-07-08T12:01:32.595066Z 22 04h55m04.556480s :BS_NODE DEBUG: [22] VDiskId# [80000028:2:0:1:0] status changed to REPLICATING 2025-07-08T12:01:32.595270Z 18 04h55m05.180944s :BS_NODE DEBUG: [18] VDiskId# [80000010:2:0:1:0] status changed to REPLICATING 2025-07-08T12:01:32.595334Z 22 04h55m05.309432s :BS_NODE DEBUG: [22] VDiskId# [80000020:2:0:1:0] status changed to REPLICATING 2025-07-08T12:01:32.595405Z 22 04h55m05.815896s :BS_NODE DEBUG: [22] VDiskId# [80000008:2:0:1:0] status changed to REPLICATING 2025-07-08T12:01:32.595478Z 18 04h55m05.876408s :BS_NODE DEBUG: [18] VDiskId# [8000002f:6:0:2:0] status changed to REPLICATING 2025-07-08T12:01:32.595545Z 18 04h55m07.713968s :BS_NODE DEBUG: [18] VDiskId# [80000038:2:0:1:0] status changed to READY 2025-07-08T12:01:32.596593Z 2 04h55m07.714480s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2025-07-08T12:01:32.596609Z 2 04h55m07.714480s :BS_NODE DEBUG: [2] VDiskId# [80000038:1:0:1:0] destroyed 2025-07-08T12:01:32.596795Z 22 04h55m18.225896s :BS_NODE DEBUG: [22] VDiskId# [80000008:2:0:1:0] status changed to READY 2025-07-08T12:01:32.597951Z 2 04h55m18.226408s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2025-07-08T12:01:32.597968Z 2 04h55m18.226408s :BS_NODE DEBUG: [2] VDiskId# [80000008:1:0:1:0] destroyed 2025-07-08T12:01:32.598006Z 22 04h55m19.049456s :BS_NODE DEBUG: [22] VDiskId# [80000000:2:0:1:0] status changed to READY 2025-07-08T12:01:32.599146Z 2 04h55m19.049968s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2025-07-08T12:01:32.599160Z 2 04h55m19.049968s :BS_NODE DEBUG: [2] VDiskId# [80000000:1:0:1:0] destroyed 2025-07-08T12:01:32.599242Z 22 04h55m20.699920s :BS_NODE DEBUG: [22] VDiskId# [80000030:2:0:1:0] status changed to READY 2025-07-08T12:01:32.600404Z 2 04h55m20.700432s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2025-07-08T12:01:32.600419Z 2 04h55m20.700432s :BS_NODE DEBUG: [2] VDiskId# [80000030:1:0:1:0] destroyed 2025-07-08T12:01:32.600455Z 18 04h55m21.847992s :BS_NODE DEBUG: [18] VDiskId# [80000018:2:0:1:0] status changed to READY 2025-07-08T12:01:32.601514Z 2 04h55m21.848504s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 
2025-07-08T12:01:32.601527Z 2 04h55m21.848504s :BS_NODE DEBUG: [2] VDiskId# [80000018:1:0:1:0] destroyed 2025-07-08T12:01:32.601558Z 22 04h55m22.044480s :BS_NODE DEBUG: [22] VDiskId# [80000028:2:0:1:0] status changed to READY 2025-07-08T12:01:32.602575Z 2 04h55m22.044992s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2025-07-08T12:01:32.602587Z 2 04h55m22.044992s :BS_NODE DEBUG: [2] VDiskId# [80000028:1:0:1:0] destroyed 2025-07-08T12:01:32.602613Z 18 04h55m22.323944s :BS_NODE DEBUG: [18] VDiskId# [80000010:2:0:1:0] status changed to READY 2025-07-08T12:01:32.603655Z 2 04h55m22.324456s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2025-07-08T12:01:32.603669Z 2 04h55m22.324456s :BS_NODE DEBUG: [2] VDiskId# [80000010:1:0:1:0] destroyed 2025-07-08T12:01:32.603789Z 18 04h55m26.854408s :BS_NODE DEBUG: [18] VDiskId# [8000002f:6:0:2:0] status changed to READY 2025-07-08T12:01:32.604694Z 2 04h55m26.854920s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2025-07-08T12:01:32.604706Z 2 04h55m26.854920s :BS_NODE DEBUG: [2] VDiskId# [8000002f:5:0:2:0] destroyed 2025-07-08T12:01:32.604897Z 22 04h55m33.829432s :BS_NODE DEBUG: [22] VDiskId# [80000020:2:0:1:0] status changed to READY 2025-07-08T12:01:32.605968Z 2 04h55m33.829944s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2025-07-08T12:01:32.605981Z 2 04h55m33.829944s :BS_NODE DEBUG: [2] VDiskId# [80000020:1:0:1:0] destroyed >> IndexBuildTestReboots::CancelBuild >> BsControllerTest::TestLocalSelfHeal [GOOD] >> IndexBuildTestReboots::BaseCase >> IndexBuildTestReboots::DropIndexWithDataColumns |69.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build_reboots/unittest |69.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build_reboots/unittest >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI [GOOD] >> KqpEffects::InsertAbort_Select_Duplicates+UseSink [GOOD] >> KqpEffects::InsertAbort_Select_Conflict+UseSink >> KqpImmediateEffects::Upsert [GOOD] >> KqpImmediateEffects::UpdateOn >> KqpInplaceUpdate::SingleRowArithm+UseSink [GOOD] >> KqpInplaceUpdate::Negative_SingleRowWithValueCast-UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::TestLocalSelfHeal [GOOD] Test command err: 2025-07-08T12:01:33.299606Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2025-07-08T12:01:33.299620Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2025-07-08T12:01:33.299641Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2025-07-08T12:01:33.299645Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2025-07-08T12:01:33.299651Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2025-07-08T12:01:33.299655Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2025-07-08T12:01:33.299661Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2025-07-08T12:01:33.299665Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2025-07-08T12:01:33.299671Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2025-07-08T12:01:33.299674Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2025-07-08T12:01:33.299680Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2025-07-08T12:01:33.299684Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2025-07-08T12:01:33.299688Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2025-07-08T12:01:33.299692Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2025-07-08T12:01:33.299698Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2025-07-08T12:01:33.299702Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2025-07-08T12:01:33.299708Z 9 00h00m00.000000s :BS_NODE DEBUG: 
[9] Bootstrap 2025-07-08T12:01:33.299711Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2025-07-08T12:01:33.299718Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2025-07-08T12:01:33.299723Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2025-07-08T12:01:33.299729Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2025-07-08T12:01:33.299733Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2025-07-08T12:01:33.299740Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2025-07-08T12:01:33.299744Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2025-07-08T12:01:33.299750Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2025-07-08T12:01:33.299754Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2025-07-08T12:01:33.299760Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2025-07-08T12:01:33.299764Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2025-07-08T12:01:33.299774Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2025-07-08T12:01:33.299777Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2025-07-08T12:01:33.299783Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2025-07-08T12:01:33.299786Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2025-07-08T12:01:33.299792Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2025-07-08T12:01:33.299796Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2025-07-08T12:01:33.299802Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2025-07-08T12:01:33.299806Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2025-07-08T12:01:33.299811Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2025-07-08T12:01:33.299815Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2025-07-08T12:01:33.299821Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2025-07-08T12:01:33.299825Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2025-07-08T12:01:33.299831Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2025-07-08T12:01:33.299835Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2025-07-08T12:01:33.299841Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2025-07-08T12:01:33.299845Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2025-07-08T12:01:33.299851Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2025-07-08T12:01:33.299854Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2025-07-08T12:01:33.299860Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2025-07-08T12:01:33.299884Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2025-07-08T12:01:33.299897Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2025-07-08T12:01:33.299900Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2025-07-08T12:01:33.299905Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2025-07-08T12:01:33.299909Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2025-07-08T12:01:33.299914Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2025-07-08T12:01:33.299919Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2025-07-08T12:01:33.299930Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2025-07-08T12:01:33.299934Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2025-07-08T12:01:33.299939Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2025-07-08T12:01:33.299943Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2025-07-08T12:01:33.299948Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2025-07-08T12:01:33.299952Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2025-07-08T12:01:33.299958Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2025-07-08T12:01:33.299961Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2025-07-08T12:01:33.299966Z 32 
00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2025-07-08T12:01:33.299970Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2025-07-08T12:01:33.299975Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2025-07-08T12:01:33.299979Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2025-07-08T12:01:33.299984Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2025-07-08T12:01:33.299987Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2025-07-08T12:01:33.299993Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2025-07-08T12:01:33.299996Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2025-07-08T12:01:33.300004Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2025-07-08T12:01:33.300008Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2025-07-08T12:01:33.303219Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2713:53] Status# ERROR ClientId# [1:2713:53] ServerId# [0:0:0] PipeClient# [1:2713:53] 2025-07-08T12:01:33.303473Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2714:41] Status# ERROR ClientId# [2:2714:41] ServerId# [0:0:0] PipeClient# [2:2714:41] 2025-07-08T12:01:33.303485Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2715:41] Status# ERROR ClientId# [3:2715:41] ServerId# [0:0:0] PipeClient# [3:2715:41] 2025-07-08T12:01:33.303492Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2716:41] Status# ERROR ClientId# [4:2716:41] ServerId# [0:0:0] PipeClient# [4:2716:41] 2025-07-08T12:01:33.303498Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2717:41] Status# ERROR ClientId# [5:2717:41] ServerId# [0:0:0] PipeClient# [5:2717:41] 2025-07-08T12:01:33.303504Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2718:41] Status# ERROR ClientId# [6:2718:41] ServerId# [0:0:0] PipeClient# [6:2718:41] 2025-07-08T12:01:33.303510Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2719:41] Status# ERROR ClientId# [7:2719:41] ServerId# [0:0:0] PipeClient# [7:2719:41] 2025-07-08T12:01:33.303516Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2720:41] Status# ERROR ClientId# [8:2720:41] ServerId# [0:0:0] PipeClient# [8:2720:41] 2025-07-08T12:01:33.303521Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2721:41] Status# ERROR ClientId# [9:2721:41] ServerId# [0:0:0] PipeClient# [9:2721:41] 2025-07-08T12:01:33.303528Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2722:41] Status# ERROR ClientId# [10:2722:41] ServerId# [0:0:0] PipeClient# [10:2722:41] 2025-07-08T12:01:33.303534Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2723:41] Status# ERROR ClientId# [11:2723:41] ServerId# [0:0:0] PipeClient# [11:2723:41] 2025-07-08T12:01:33.303539Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2724:41] Status# ERROR ClientId# [12:2724:41] ServerId# [0:0:0] PipeClient# [12:2724:41] 2025-07-08T12:01:33.303544Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2725:41] Status# ERROR ClientId# [13:2725:41] ServerId# [0:0:0] PipeClient# [13:2725:41] 2025-07-08T12:01:33.303550Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2726:41] Status# ERROR ClientId# [14:2726:41] ServerId# [0:0:0] PipeClient# [14:2726:41] 2025-07-08T12:01:33.303558Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2727:41] Status# ERROR ClientId# [15:2727:41] ServerId# [0:0:0] PipeClient# [15:2727:41] 2025-07-08T12:01:33.303563Z 16 00h00m00.000000s :BS_NODE 
DEBUG: [16] ClientConnected Sender# [16:2728:41] Status# ERROR ClientId# [16:2728:41] ServerId# [0:0:0] PipeClient# [16:2728:41] 2025-07-08T12:01:33.303569Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2729:41] Status# ERROR ClientId# [17:2729:41] ServerId# [0:0:0] PipeClient# [17:2729:41] 2025-07-08T12:01:33.303575Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2730:41] Status# ERROR ClientId# [18:2730:41] ServerId# [0:0:0] PipeClient# [18:2730:41] 2025-07-08T12:01:33.303581Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2731:41] Status# ERROR ClientId# [19:2731:41] ServerId# [0:0:0] PipeClient# [19:2731:41] 2025-07-08T12:01:33.303586Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2732:41] Status# ERROR ClientId# [20:2732:41] ServerId# [0:0:0] PipeClient# [20:2732:41] 2025-07-08T12:01:33.303591Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2733:41] Status# ERROR ClientId# [21:2733:41] ServerId# [0:0:0] PipeClient# [21:2733:41] 2025-07-08T12:01:33.303597Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2734:41] Status# ERROR ClientId# [22:2734:41] ServerId# [0:0:0] PipeClient# [22:2734:41] 2025-07-08T12:01:33.303602Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2735:41] Status# ERROR ClientId# [23:2735:41] ServerId# [0:0:0] PipeClient# [23:2735:41] 2025-07-08T12:01:33.303608Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2736:41] Status# ERROR ClientId# [24:2736:41] ServerId# [0:0:0] PipeClient# [24:2736:41] 2025-07-08T12:01:33.303613Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2737:41] Status# ERROR ClientId# [25:2737:41] ServerId# [0:0:0] PipeClient# [25:2737:41] 2025-07-08T12:01:33.303619Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2738:41] Status# ERROR ClientId# [26:2738:41] ServerId# [0:0:0] PipeClient# [26:2738:41] 2025-07-08T12:01:33.303625Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2739:41] Status# ERROR ClientId# [27:2739:41] ServerId# [0:0:0] PipeClient# [27:2739:41] 2025-07-08T12:01:33.303630Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2740:41] Status# ERROR ClientId# [28:2740:41] ServerId# [0:0:0] PipeClient# [28:2740:41] 2025-07-08T12:01:33.303636Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2741:41] Status# ERROR ClientId# [29:2741:41] ServerId# [0:0:0] PipeClient# [29:2741:41] 2025-07-08T12:01:33.303642Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2742:41] Status# ERROR ClientId# [30:2742:41] ServerId# [0:0:0] PipeClient# [30:2742:41] 2025-07-08T12:01:33.303648Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2743:41] Status# ERROR ClientId# [31:2743:41] ServerId# [0:0:0] PipeClient# [31:2743:41] 2025-07-08T12:01:33.303654Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2744:41] Status# ERROR ClientId# [32:2744:41] ServerId# [0:0:0] PipeClient# [32:2744:41] 2025-07-08T12:01:33.303660Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2745:41] Status# ERROR ClientId# [33:2745:41] ServerId# [0:0:0] PipeClient# [33:2745:41] 2025-07-08T12:01:33.303665Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2746:41] Status# ERROR ClientId# [34:2746:41] ServerId# [0:0:0] PipeClient# [34:2746:41] 2025-07-08T12:01:33.303671Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] 
ClientConnected Sender# [35:2747:41] Status# ERROR ClientId# [35:2747:41 ... Reassigner TEvVStatusResult GroupId# 2147483667 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:33.631869Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483667 VDiskId# [80000013:1:2:1:0] DiskIsOk# true 2025-07-08T12:01:33.631875Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483667 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:33.631879Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483667 VDiskId# [80000013:1:2:2:0] DiskIsOk# true 2025-07-08T12:01:33.635105Z 1 00h05m00.105120s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:206} Reassigner succeeded GroupId# 2147483667 Items# [80000013:1:0:1:0]: 7:1002:1001 -> 7:1000:1010 ConfigTxSeqNo# 48 2025-07-08T12:01:33.635119Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:217} Reassigner finished GroupId# 2147483667 Success# true 2025-07-08T12:01:33.635150Z 34 00h05m00.105120s :BS_NODE DEBUG: [34] NodeServiceSetUpdate 2025-07-08T12:01:33.635161Z 34 00h05m00.105120s :BS_NODE DEBUG: [34] VDiskId# [80000013:1:2:2:0] -> [80000013:2:2:2:0] 2025-07-08T12:01:33.635176Z 19 00h05m00.105120s :BS_NODE DEBUG: [19] NodeServiceSetUpdate 2025-07-08T12:01:33.635183Z 19 00h05m00.105120s :BS_NODE DEBUG: [19] VDiskId# [80000013:1:1:1:0] -> [80000013:2:1:1:0] 2025-07-08T12:01:33.635196Z 4 00h05m00.105120s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2025-07-08T12:01:33.635203Z 4 00h05m00.105120s :BS_NODE DEBUG: [4] VDiskId# [80000013:1:0:0:0] -> [80000013:2:0:0:0] 2025-07-08T12:01:33.635215Z 22 00h05m00.105120s :BS_NODE DEBUG: [22] NodeServiceSetUpdate 2025-07-08T12:01:33.635223Z 22 00h05m00.105120s :BS_NODE DEBUG: [22] VDiskId# [80000013:1:1:2:0] -> [80000013:2:1:2:0] 2025-07-08T12:01:33.635235Z 7 00h05m00.105120s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2025-07-08T12:01:33.635242Z 7 00h05m00.105120s :BS_NODE DEBUG: [7] VDiskId# [80000013:2:0:1:0] PDiskId# 1000 VSlotId# 1010 created 2025-07-08T12:01:33.635252Z 7 00h05m00.105120s :BS_NODE DEBUG: [7] VDiskId# [80000013:2:0:1:0] status changed to INIT_PENDING 2025-07-08T12:01:33.635266Z 10 00h05m00.105120s :BS_NODE DEBUG: [10] NodeServiceSetUpdate 2025-07-08T12:01:33.635274Z 10 00h05m00.105120s :BS_NODE DEBUG: [10] VDiskId# [80000013:1:0:2:0] -> [80000013:2:0:2:0] 2025-07-08T12:01:33.635286Z 28 00h05m00.105120s :BS_NODE DEBUG: [28] NodeServiceSetUpdate 2025-07-08T12:01:33.635292Z 28 00h05m00.105120s :BS_NODE DEBUG: [28] VDiskId# [80000013:1:2:0:0] -> [80000013:2:2:0:0] 2025-07-08T12:01:33.635305Z 31 00h05m00.105120s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2025-07-08T12:01:33.635312Z 31 00h05m00.105120s :BS_NODE DEBUG: [31] VDiskId# [80000013:1:2:1:0] -> [80000013:2:2:1:0] 2025-07-08T12:01:33.635324Z 16 00h05m00.105120s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-07-08T12:01:33.635331Z 16 00h05m00.105120s :BS_NODE DEBUG: [16] VDiskId# [80000013:1:1:0:0] -> [80000013:2:1:0:0] 2025-07-08T12:01:33.635391Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH01@self_heal.cpp:71} Reassigner starting GroupId# 2147483651 2025-07-08T12:01:33.635577Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483651 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:33.635586Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 
2147483651 VDiskId# [80000003:1:0:0:0] DiskIsOk# true 2025-07-08T12:01:33.635592Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483651 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:33.635597Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483651 VDiskId# [80000003:1:0:2:0] DiskIsOk# true 2025-07-08T12:01:33.635602Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483651 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:33.635607Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483651 VDiskId# [80000003:1:1:0:0] DiskIsOk# true 2025-07-08T12:01:33.635612Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483651 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:33.635616Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483651 VDiskId# [80000003:1:1:1:0] DiskIsOk# true 2025-07-08T12:01:33.635622Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483651 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:33.635626Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483651 VDiskId# [80000003:1:1:2:0] DiskIsOk# true 2025-07-08T12:01:33.635632Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483651 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:33.635636Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483651 VDiskId# [80000003:1:2:0:0] DiskIsOk# true 2025-07-08T12:01:33.635641Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483651 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:33.635645Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483651 VDiskId# [80000003:1:2:1:0] DiskIsOk# true 2025-07-08T12:01:33.635650Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483651 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:33.635655Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483651 VDiskId# [80000003:1:2:2:0] DiskIsOk# true 2025-07-08T12:01:33.638706Z 1 00h05m00.105632s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:206} Reassigner succeeded GroupId# 2147483651 Items# [80000003:1:0:1:0]: 7:1002:1000 -> 7:1001:1010 ConfigTxSeqNo# 49 2025-07-08T12:01:33.638720Z 1 00h05m00.105632s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:217} Reassigner finished GroupId# 2147483651 Success# true 2025-07-08T12:01:33.638749Z 34 00h05m00.105632s :BS_NODE DEBUG: [34] NodeServiceSetUpdate 2025-07-08T12:01:33.638759Z 34 00h05m00.105632s :BS_NODE DEBUG: [34] VDiskId# [80000003:1:2:2:0] -> [80000003:2:2:2:0] 2025-07-08T12:01:33.638773Z 19 00h05m00.105632s :BS_NODE DEBUG: [19] NodeServiceSetUpdate 2025-07-08T12:01:33.638779Z 19 00h05m00.105632s :BS_NODE DEBUG: [19] VDiskId# [80000003:1:1:1:0] -> [80000003:2:1:1:0] 2025-07-08T12:01:33.638790Z 4 00h05m00.105632s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2025-07-08T12:01:33.638798Z 4 
00h05m00.105632s :BS_NODE DEBUG: [4] VDiskId# [80000003:1:0:0:0] -> [80000003:2:0:0:0] 2025-07-08T12:01:33.638809Z 22 00h05m00.105632s :BS_NODE DEBUG: [22] NodeServiceSetUpdate 2025-07-08T12:01:33.638816Z 22 00h05m00.105632s :BS_NODE DEBUG: [22] VDiskId# [80000003:1:1:2:0] -> [80000003:2:1:2:0] 2025-07-08T12:01:33.638829Z 7 00h05m00.105632s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2025-07-08T12:01:33.638836Z 7 00h05m00.105632s :BS_NODE DEBUG: [7] VDiskId# [80000003:2:0:1:0] PDiskId# 1001 VSlotId# 1010 created 2025-07-08T12:01:33.638848Z 7 00h05m00.105632s :BS_NODE DEBUG: [7] VDiskId# [80000003:2:0:1:0] status changed to INIT_PENDING 2025-07-08T12:01:33.638861Z 10 00h05m00.105632s :BS_NODE DEBUG: [10] NodeServiceSetUpdate 2025-07-08T12:01:33.638867Z 10 00h05m00.105632s :BS_NODE DEBUG: [10] VDiskId# [80000003:1:0:2:0] -> [80000003:2:0:2:0] 2025-07-08T12:01:33.638881Z 28 00h05m00.105632s :BS_NODE DEBUG: [28] NodeServiceSetUpdate 2025-07-08T12:01:33.638887Z 28 00h05m00.105632s :BS_NODE DEBUG: [28] VDiskId# [80000003:1:2:0:0] -> [80000003:2:2:0:0] 2025-07-08T12:01:33.638898Z 31 00h05m00.105632s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2025-07-08T12:01:33.638904Z 31 00h05m00.105632s :BS_NODE DEBUG: [31] VDiskId# [80000003:1:2:1:0] -> [80000003:2:2:1:0] 2025-07-08T12:01:33.638916Z 16 00h05m00.105632s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-07-08T12:01:33.638925Z 16 00h05m00.105632s :BS_NODE DEBUG: [16] VDiskId# [80000003:1:1:0:0] -> [80000003:2:1:0:0] 2025-07-08T12:01:33.639141Z 7 00h05m01.630120s :BS_NODE DEBUG: [7] VDiskId# [80000013:2:0:1:0] status changed to REPLICATING 2025-07-08T12:01:33.639235Z 7 00h05m02.121560s :BS_NODE DEBUG: [7] VDiskId# [80000063:2:0:1:0] status changed to REPLICATING 2025-07-08T12:01:33.639363Z 7 00h05m02.785608s :BS_NODE DEBUG: [7] VDiskId# [80000023:2:0:1:0] status changed to REPLICATING 2025-07-08T12:01:33.639467Z 7 00h05m02.797072s :BS_NODE DEBUG: [7] VDiskId# [80000053:2:0:1:0] status changed to REPLICATING 2025-07-08T12:01:33.639571Z 7 00h05m03.392096s :BS_NODE DEBUG: [7] VDiskId# [80000033:2:0:1:0] status changed to REPLICATING 2025-07-08T12:01:33.639672Z 7 00h05m03.589584s :BS_NODE DEBUG: [7] VDiskId# [80000043:2:0:1:0] status changed to REPLICATING 2025-07-08T12:01:33.639775Z 7 00h05m04.663048s :BS_NODE DEBUG: [7] VDiskId# [80000073:2:0:1:0] status changed to REPLICATING 2025-07-08T12:01:33.640067Z 7 00h05m05.517632s :BS_NODE DEBUG: [7] VDiskId# [80000003:2:0:1:0] status changed to REPLICATING 2025-07-08T12:01:33.640231Z 7 00h05m14.688120s :BS_NODE DEBUG: [7] VDiskId# [80000013:2:0:1:0] status changed to READY 2025-07-08T12:01:33.645319Z 7 00h05m14.688632s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2025-07-08T12:01:33.645343Z 7 00h05m14.688632s :BS_NODE DEBUG: [7] VDiskId# [80000013:1:0:1:0] destroyed 2025-07-08T12:01:33.645396Z 7 00h05m14.897048s :BS_NODE DEBUG: [7] VDiskId# [80000073:2:0:1:0] status changed to READY 2025-07-08T12:01:33.646971Z 7 00h05m14.897560s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2025-07-08T12:01:33.646986Z 7 00h05m14.897560s :BS_NODE DEBUG: [7] VDiskId# [80000073:1:0:1:0] destroyed 2025-07-08T12:01:33.647270Z 7 00h05m20.299608s :BS_NODE DEBUG: [7] VDiskId# [80000023:2:0:1:0] status changed to READY 2025-07-08T12:01:33.648815Z 7 00h05m20.300120s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2025-07-08T12:01:33.648829Z 7 00h05m20.300120s :BS_NODE DEBUG: [7] VDiskId# [80000023:1:0:1:0] destroyed 2025-07-08T12:01:33.648911Z 7 00h05m25.375584s :BS_NODE DEBUG: [7] VDiskId# [80000043:2:0:1:0] status changed to READY 
2025-07-08T12:01:33.650562Z 7 00h05m25.376096s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2025-07-08T12:01:33.650577Z 7 00h05m25.376096s :BS_NODE DEBUG: [7] VDiskId# [80000043:1:0:1:0] destroyed 2025-07-08T12:01:33.650827Z 7 00h05m30.737560s :BS_NODE DEBUG: [7] VDiskId# [80000063:2:0:1:0] status changed to READY 2025-07-08T12:01:33.652366Z 7 00h05m30.738072s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2025-07-08T12:01:33.652381Z 7 00h05m30.738072s :BS_NODE DEBUG: [7] VDiskId# [80000063:1:0:1:0] destroyed 2025-07-08T12:01:33.652422Z 7 00h05m34.870096s :BS_NODE DEBUG: [7] VDiskId# [80000033:2:0:1:0] status changed to READY 2025-07-08T12:01:33.654045Z 7 00h05m34.870608s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2025-07-08T12:01:33.654063Z 7 00h05m34.870608s :BS_NODE DEBUG: [7] VDiskId# [80000033:1:0:1:0] destroyed 2025-07-08T12:01:33.654240Z 7 00h05m36.278072s :BS_NODE DEBUG: [7] VDiskId# [80000053:2:0:1:0] status changed to READY 2025-07-08T12:01:33.655917Z 7 00h05m36.278584s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2025-07-08T12:01:33.655930Z 7 00h05m36.278584s :BS_NODE DEBUG: [7] VDiskId# [80000053:1:0:1:0] destroyed 2025-07-08T12:01:33.655969Z 7 00h05m39.769632s :BS_NODE DEBUG: [7] VDiskId# [80000003:2:0:1:0] status changed to READY 2025-07-08T12:01:33.657682Z 7 00h05m39.770144s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2025-07-08T12:01:33.657696Z 7 00h05m39.770144s :BS_NODE DEBUG: [7] VDiskId# [80000003:1:0:1:0] destroyed |69.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut >> KqpImmediateEffects::UpsertExistingKey [GOOD] >> KqpImmediateEffects::WriteThenReadWithCommit |69.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |69.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build_reboots/unittest |69.7%| [LD] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |69.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |69.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |69.7%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut >> IndexBuildTestReboots::IndexPartitioning >> IndexBuildTestReboots::DropIndex |69.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |69.7%| [LD] {RESULT} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |69.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI [GOOD] Test command err: 2025-07-08T12:01:06.497018Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679907623931897:2080];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:06.497040Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:01:06.584605Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T12:01:06.591577Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T12:01:06.592816Z node 2 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679909283879755:2246];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:06.592965Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001698/r3tmp/tmp5SYxLj/pdisk_1.dat 2025-07-08T12:01:06.656141Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23796, node 1 2025-07-08T12:01:06.692658Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:06.692687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:06.694798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:06.705793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:06.705825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:06.714100Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-07-08T12:01:06.716190Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/43nv/001698/r3tmp/yandex8UzPhD.tmp 2025-07-08T12:01:06.716196Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/43nv/001698/r3tmp/yandex8UzPhD.tmp 2025-07-08T12:01:06.716260Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/43nv/001698/r3tmp/yandex8UzPhD.tmp 2025-07-08T12:01:06.716294Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:01:06.717973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:06.746443Z INFO: TTestServer started on Port 27502 GrpcPort 23796 TClient is connected to server localhost:27502 PQClient connected to localhost:23796 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:06.800730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-07-08T12:01:06.903036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:07.031044Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:07.104931Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7524679911918900158:2288], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T12:01:07.105572Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmE0NzI3ZDAtNzhhODdiNWYtYjMzODQ0MzYtZDg1Y2UwYTA=, ActorId: [1:7524679911918900156:2287], ActorState: ExecuteState, TraceId: 01jzmyjfzteb0mxvdw4tgx9a3a, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T12:01:07.107825Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T12:01:07.125543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:07.171786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:07.282073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-07-08T12:01:07.385481Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jzmyjg891are28b4f6mqk2gc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzhiMjE1OTUtMjI2ZmM3M2QtNTU0YmNlM2QtOTBkYzgyMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7524679911918900583:2986] 2025-07-08T12:01:07.497721Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:01:07.537469Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:01:11.501032Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7524679907623931897:2080];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:11.501080Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-07-08T12:01:11.537328Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7524679909283879755:2246];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:11.537361Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-07-08T12:01:13.425190Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7524679907623932097:2128], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-07-08T12:01:13.425265Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7524679907623932097:2128], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 } 2025-07-08T12:01:13.425286Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7524679907623932097:2128], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7524679907623932595:2457] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 11 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1751976066876 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-07-08T12:01:13.425304Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7524679907623932097:2128], cacheItem# { Subscriber: { Subscriber: [1:7524679907623932595:2457] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 11 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1751976066876 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 11 IsSync: true Partial: 0 } 2025-07-08T12:01:13.425357Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7524679937688704823:3273], recipient# [1:7524679937688704822:3272], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath 
RedirectRequired: false ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-07-08T12:01:13.425547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:13.429044Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:7524679909283879776:2105], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /Root PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 7205759404 ... me@random@consumer_7_1_1257612066546215437_v1 grpc read done: success# 0, data# { } 2025-07-08T12:01:33.862210Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer some@random@consumer session some@random@consumer_7_1_1257612066546215437_v1 grpc read failed 2025-07-08T12:01:33.862216Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer some@random@consumer session some@random@consumer_7_1_1257612066546215437_v1 grpc closed 2025-07-08T12:01:33.862230Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer some@random@consumer session some@random@consumer_7_1_1257612066546215437_v1 is DEAD 2025-07-08T12:01:33.863001Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037895] Destroy direct read session some@random@consumer_7_1_1257612066546215437_v1 2025-07-08T12:01:33.863014Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037895] server disconnected, pipe [7:7524680022896801710:2450] destroyed 2025-07-08T12:01:33.863020Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037895] Destroy direct read session some@random@consumer_7_1_1257612066546215437_v1 2025-07-08T12:01:33.863023Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037895] server disconnected, pipe [7:7524680022896801707:2449] destroyed 2025-07-08T12:01:33.863027Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session some@random@consumer_7_1_1257612066546215437_v1 2025-07-08T12:01:33.863031Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [7:7524680022896801715:2453] destroyed 2025-07-08T12:01:33.863044Z node 8 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: some@random@consumer_7_1_1257612066546215437_v1 2025-07-08T12:01:33.863047Z node 8 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: some@random@consumer_7_1_1257612066546215437_v1 2025-07-08T12:01:33.863048Z node 8 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: some@random@consumer_7_1_1257612066546215437_v1 2025-07-08T12:01:33.863124Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session some@random@consumer_7_1_1257612066546215437_v1 2025-07-08T12:01:33.863130Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [7:7524680022896801712:2451] destroyed 2025-07-08T12:01:33.863134Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session some@random@consumer_7_1_1257612066546215437_v1 2025-07-08T12:01:33.863136Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe 
[7:7524680022896801713:2452] destroyed 2025-07-08T12:01:33.863139Z node 7 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: some@random@consumer_7_1_1257612066546215437_v1 2025-07-08T12:01:33.863142Z node 7 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: some@random@consumer_7_1_1257612066546215437_v1 2025-07-08T12:01:33.863156Z node 7 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: 123|8301957e-951f3abd-ec493499-e34cdb20_0 grpc read done: success: 0 data: 2025-07-08T12:01:33.863158Z node 7 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 123|8301957e-951f3abd-ec493499-e34cdb20_0 grpc read failed 2025-07-08T12:01:33.863164Z node 7 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 123|8301957e-951f3abd-ec493499-e34cdb20_0 grpc closed 2025-07-08T12:01:33.863166Z node 7 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 123|8301957e-951f3abd-ec493499-e34cdb20_0 is DEAD 2025-07-08T12:01:33.863394Z node 7 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-07-08T12:01:33.863418Z node 7 :PERSQUEUE_READ_BALANCER INFO: [72075186224037897][topic2] pipe [7:7524680022896801704:2446] disconnected; active server actors: 1 2025-07-08T12:01:33.863421Z node 7 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037897][topic2] pipe [7:7524680022896801704:2446] client some@random@consumer disconnected session some@random@consumer_7_1_1257612066546215437_v1 2025-07-08T12:01:33.863654Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [7:7524680022896801698:2439] destroyed 2025-07-08T12:01:33.863669Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-07-08T12:01:33.863758Z :INFO: [/Root] TraceId [] SessionId [123|8301957e-951f3abd-ec493499-e34cdb20_0] MessageGroupId [123] Write session: gracefully shut down, all writes complete 2025-07-08T12:01:33.864178Z :DEBUG: [/Root] TraceId [] SessionId [123|8301957e-951f3abd-ec493499-e34cdb20_0] MessageGroupId [123] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2025-07-08T12:01:33.864201Z :DEBUG: [/Root] TraceId [] SessionId [123|8301957e-951f3abd-ec493499-e34cdb20_0] MessageGroupId [123] Write session is aborting and will not restart 2025-07-08T12:01:33.864238Z :DEBUG: [/Root] TraceId [] SessionId [123|8301957e-951f3abd-ec493499-e34cdb20_0] MessageGroupId [123] Write session: destroy 2025-07-08T12:01:34.004457Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7524680001421962813:2116], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-07-08T12:01:34.004509Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7524680001421962813:2116], cacheItem# { Subscriber: { Subscriber: [7:7524680005716931361:2981] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath 
RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-07-08T12:01:34.004554Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7524680027191769042:3859], recipient# [7:7524680027191769041:2466], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-07-08T12:01:34.009219Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7524680001421962813:2116], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-07-08T12:01:34.009267Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7524680001421962813:2116], cacheItem# { Subscriber: { Subscriber: [7:7524680005716931361:2981] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-07-08T12:01:34.009310Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7524680027191769044:3860], recipient# [7:7524680027191769043:2467], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-07-08T12:01:34.015640Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [8:7524680003097409295:2102], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-07-08T12:01:34.015685Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [8:7524680003097409295:2102], cacheItem# { Subscriber: { Subscriber: [8:7524680007392376767:2202] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 
IsSync: false Partial: 0 } 2025-07-08T12:01:34.015698Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [8:7524680028867213789:2606], recipient# [8:7524680028867213788:2317], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-07-08T12:01:34.023300Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [8:7524680003097409295:2102], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-07-08T12:01:34.023340Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [8:7524680003097409295:2102], cacheItem# { Subscriber: { Subscriber: [8:7524680007392376767:2202] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-07-08T12:01:34.023365Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [8:7524680028867213791:2607], recipient# [8:7524680028867213790:2318], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> KqpEffects::InsertAbort_Select_Conflict+UseSink [GOOD] >> TPDiskTest::DeviceHaltTooLong [GOOD] >> KqpEffects::InsertAbort_Select_Conflict-UseSink >> BsControllerTest::TestLocalBrokenRelocation [GOOD] >> TPDiskTest::ChangePDiskKey >> KqpImmediateEffects::UpdateOn [GOOD] >> KqpImmediateEffects::WriteThenReadWithCommit [GOOD] >> TPDiskTest::ChangePDiskKey [GOOD] >> TPDiskTest::RecreateWithInvalidPDiskKey >> KqpInplaceUpdate::Negative_SingleRowWithValueCast-UseSink [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::TestLocalBrokenRelocation [GOOD] Test command err: 2025-07-08T12:01:30.289582Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2025-07-08T12:01:30.289604Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2025-07-08T12:01:30.289629Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2025-07-08T12:01:30.289634Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2025-07-08T12:01:30.289640Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2025-07-08T12:01:30.289643Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2025-07-08T12:01:30.289650Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2025-07-08T12:01:30.289654Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2025-07-08T12:01:30.289660Z 5 00h00m00.000000s :BS_NODE 
DEBUG: [5] Bootstrap 2025-07-08T12:01:30.289664Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2025-07-08T12:01:30.289671Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2025-07-08T12:01:30.289675Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2025-07-08T12:01:30.289681Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2025-07-08T12:01:30.289685Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2025-07-08T12:01:30.289691Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2025-07-08T12:01:30.289695Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2025-07-08T12:01:30.289701Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2025-07-08T12:01:30.289705Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2025-07-08T12:01:30.289711Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2025-07-08T12:01:30.289715Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2025-07-08T12:01:30.289721Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2025-07-08T12:01:30.289726Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2025-07-08T12:01:30.289732Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2025-07-08T12:01:30.289736Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2025-07-08T12:01:30.289741Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2025-07-08T12:01:30.289745Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2025-07-08T12:01:30.289751Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2025-07-08T12:01:30.289755Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2025-07-08T12:01:30.289765Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2025-07-08T12:01:30.289769Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2025-07-08T12:01:30.289775Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2025-07-08T12:01:30.289779Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2025-07-08T12:01:30.289785Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2025-07-08T12:01:30.289789Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2025-07-08T12:01:30.289796Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2025-07-08T12:01:30.289800Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2025-07-08T12:01:30.289806Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2025-07-08T12:01:30.289810Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2025-07-08T12:01:30.289816Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2025-07-08T12:01:30.289820Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2025-07-08T12:01:30.289826Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2025-07-08T12:01:30.289829Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2025-07-08T12:01:30.289835Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2025-07-08T12:01:30.289839Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2025-07-08T12:01:30.289844Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2025-07-08T12:01:30.289848Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2025-07-08T12:01:30.289854Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2025-07-08T12:01:30.289861Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2025-07-08T12:01:30.289869Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2025-07-08T12:01:30.289873Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2025-07-08T12:01:30.289878Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2025-07-08T12:01:30.289882Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2025-07-08T12:01:30.289888Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2025-07-08T12:01:30.289894Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2025-07-08T12:01:30.289904Z 28 
00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2025-07-08T12:01:30.289909Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2025-07-08T12:01:30.289914Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2025-07-08T12:01:30.289919Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2025-07-08T12:01:30.289925Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2025-07-08T12:01:30.289929Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2025-07-08T12:01:30.289936Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2025-07-08T12:01:30.289939Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2025-07-08T12:01:30.289945Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2025-07-08T12:01:30.289948Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2025-07-08T12:01:30.289952Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2025-07-08T12:01:30.289954Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2025-07-08T12:01:30.289958Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2025-07-08T12:01:30.289960Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2025-07-08T12:01:30.289964Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2025-07-08T12:01:30.289968Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2025-07-08T12:01:30.289976Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2025-07-08T12:01:30.289980Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2025-07-08T12:01:30.292546Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2713:53] Status# ERROR ClientId# [1:2713:53] ServerId# [0:0:0] PipeClient# [1:2713:53] 2025-07-08T12:01:30.292700Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2714:41] Status# ERROR ClientId# [2:2714:41] ServerId# [0:0:0] PipeClient# [2:2714:41] 2025-07-08T12:01:30.292705Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2715:41] Status# ERROR ClientId# [3:2715:41] ServerId# [0:0:0] PipeClient# [3:2715:41] 2025-07-08T12:01:30.292709Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2716:41] Status# ERROR ClientId# [4:2716:41] ServerId# [0:0:0] PipeClient# [4:2716:41] 2025-07-08T12:01:30.292713Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2717:41] Status# ERROR ClientId# [5:2717:41] ServerId# [0:0:0] PipeClient# [5:2717:41] 2025-07-08T12:01:30.292717Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2718:41] Status# ERROR ClientId# [6:2718:41] ServerId# [0:0:0] PipeClient# [6:2718:41] 2025-07-08T12:01:30.292721Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2719:41] Status# ERROR ClientId# [7:2719:41] ServerId# [0:0:0] PipeClient# [7:2719:41] 2025-07-08T12:01:30.292725Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2720:41] Status# ERROR ClientId# [8:2720:41] ServerId# [0:0:0] PipeClient# [8:2720:41] 2025-07-08T12:01:30.292728Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2721:41] Status# ERROR ClientId# [9:2721:41] ServerId# [0:0:0] PipeClient# [9:2721:41] 2025-07-08T12:01:30.292732Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2722:41] Status# ERROR ClientId# [10:2722:41] ServerId# [0:0:0] PipeClient# [10:2722:41] 2025-07-08T12:01:30.292736Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2723:41] Status# ERROR ClientId# [11:2723:41] ServerId# [0:0:0] PipeClient# [11:2723:41] 2025-07-08T12:01:30.292740Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2724:41] Status# ERROR ClientId# [12:2724:41] ServerId# [0:0:0] PipeClient# 
[12:2724:41] 2025-07-08T12:01:30.292744Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2725:41] Status# ERROR ClientId# [13:2725:41] ServerId# [0:0:0] PipeClient# [13:2725:41] 2025-07-08T12:01:30.292748Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2726:41] Status# ERROR ClientId# [14:2726:41] ServerId# [0:0:0] PipeClient# [14:2726:41] 2025-07-08T12:01:30.358182Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2727:41] Status# ERROR ClientId# [15:2727:41] ServerId# [0:0:0] PipeClient# [15:2727:41] 2025-07-08T12:01:30.358206Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2728:41] Status# ERROR ClientId# [16:2728:41] ServerId# [0:0:0] PipeClient# [16:2728:41] 2025-07-08T12:01:30.358216Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2729:41] Status# ERROR ClientId# [17:2729:41] ServerId# [0:0:0] PipeClient# [17:2729:41] 2025-07-08T12:01:30.358226Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2730:41] Status# ERROR ClientId# [18:2730:41] ServerId# [0:0:0] PipeClient# [18:2730:41] 2025-07-08T12:01:30.358236Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2731:41] Status# ERROR ClientId# [19:2731:41] ServerId# [0:0:0] PipeClient# [19:2731:41] 2025-07-08T12:01:30.358244Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2732:41] Status# ERROR ClientId# [20:2732:41] ServerId# [0:0:0] PipeClient# [20:2732:41] 2025-07-08T12:01:30.358252Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2733:41] Status# ERROR ClientId# [21:2733:41] ServerId# [0:0:0] PipeClient# [21:2733:41] 2025-07-08T12:01:30.358261Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2734:41] Status# ERROR ClientId# [22:2734:41] ServerId# [0:0:0] PipeClient# [22:2734:41] 2025-07-08T12:01:30.358270Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2735:41] Status# ERROR ClientId# [23:2735:41] ServerId# [0:0:0] PipeClient# [23:2735:41] 2025-07-08T12:01:30.358279Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2736:41] Status# ERROR ClientId# [24:2736:41] ServerId# [0:0:0] PipeClient# [24:2736:41] 2025-07-08T12:01:30.358289Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2737:41] Status# ERROR ClientId# [25:2737:41] ServerId# [0:0:0] PipeClient# [25:2737:41] 2025-07-08T12:01:30.358298Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2738:41] Status# ERROR ClientId# [26:2738:41] ServerId# [0:0:0] PipeClient# [26:2738:41] 2025-07-08T12:01:30.358305Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2739:41] Status# ERROR ClientId# [27:2739:41] ServerId# [0:0:0] PipeClient# [27:2739:41] 2025-07-08T12:01:30.358314Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2740:41] Status# ERROR ClientId# [28:2740:41] ServerId# [0:0:0] PipeClient# [28:2740:41] 2025-07-08T12:01:30.358324Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2741:41] Status# ERROR ClientId# [29:2741:41] ServerId# [0:0:0] PipeClient# [29:2741:41] 2025-07-08T12:01:30.358334Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2742:41] Status# ERROR ClientId# [30:2742:41] ServerId# [0:0:0] PipeClient# [30:2742:41] 2025-07-08T12:01:30.358343Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2743:41] Status# ERROR ClientId# [31:2743:41] ServerId# [0:0:0] PipeClient# [31:2743:41] 
2025-07-08T12:01:30.358353Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2744:41] Status# ERROR ClientId# [32:2744:41] ServerId# [0:0:0] PipeClient# [32:2744:41] 2025-07-08T12:01:30.358364Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2745:41] Status# ERROR ClientId# [33:2745:41] ServerId# [0:0:0] PipeClient# [33:2745:41] 2025-07-08T12:01:30.358373Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2746:41] Status# ERROR ClientId# [34:2746:41] ServerId# [0:0:0] PipeClient# [34:2746:41] 2025-07-08T12:01:30.358382Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2747:41] Status# ERROR ClientId# [35:2747:41 ... 25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000001:2:2:2:0] -> [80000001:3:2:2:0] 2025-07-08T12:01:34.927072Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000021:2:2:2:0] -> [80000021:3:2:2:0] 2025-07-08T12:01:34.927078Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000031:2:2:2:0] -> [80000031:3:2:2:0] 2025-07-08T12:01:34.927084Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000051:2:2:2:0] -> [80000051:3:2:2:0] 2025-07-08T12:01:34.927090Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000061:2:2:2:0] -> [80000061:3:2:2:0] 2025-07-08T12:01:34.927161Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2025-07-08T12:01:34.974724Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000010:2:1:0:0] -> [80000010:3:1:0:0] 2025-07-08T12:01:34.974757Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000040:2:1:0:0] -> [80000040:3:1:0:0] 2025-07-08T12:01:34.974765Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000070:2:1:0:0] -> [80000070:3:1:0:0] 2025-07-08T12:01:34.974772Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000001:2:1:1:0] -> [80000001:3:1:1:0] 2025-07-08T12:01:34.974780Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000021:2:1:1:0] -> [80000021:3:1:1:0] 2025-07-08T12:01:34.974787Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000031:2:1:1:0] -> [80000031:3:1:1:0] 2025-07-08T12:01:34.974794Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000051:2:1:1:0] -> [80000051:3:1:1:0] 2025-07-08T12:01:34.974801Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000061:2:1:1:0] -> [80000061:3:1:1:0] 2025-07-08T12:01:34.974807Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000002:1:1:2:0] -> [80000002:2:1:2:0] 2025-07-08T12:01:34.974814Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000012:1:1:2:0] -> [80000012:2:1:2:0] 2025-07-08T12:01:34.974821Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000022:1:1:2:0] -> [80000022:2:1:2:0] 2025-07-08T12:01:34.974828Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000032:1:1:2:0] -> [80000032:2:1:2:0] 2025-07-08T12:01:34.974835Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000042:1:1:2:0] -> [80000042:2:1:2:0] 2025-07-08T12:01:34.974842Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000052:1:1:2:0] -> [80000052:2:1:2:0] 2025-07-08T12:01:34.974848Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000062:1:1:2:0] -> [80000062:2:1:2:0] 2025-07-08T12:01:34.974854Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000072:1:1:2:0] -> [80000072:2:1:2:0] 2025-07-08T12:01:34.975086Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2025-07-08T12:01:34.975100Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000010:2:2:2:0] -> [80000010:3:2:2:0] 2025-07-08T12:01:34.975108Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# 
[80000040:2:2:2:0] -> [80000040:3:2:2:0] 2025-07-08T12:01:34.975114Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000070:2:2:2:0] -> [80000070:3:2:2:0] 2025-07-08T12:01:34.975121Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000002:1:2:0:0] -> [80000002:2:2:0:0] 2025-07-08T12:01:34.975127Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000012:1:2:0:0] -> [80000012:2:2:0:0] 2025-07-08T12:01:34.975133Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000022:1:2:0:0] -> [80000022:2:2:0:0] 2025-07-08T12:01:34.975138Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000032:1:2:0:0] -> [80000032:2:2:0:0] 2025-07-08T12:01:34.975144Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000042:1:2:0:0] -> [80000042:2:2:0:0] 2025-07-08T12:01:34.975150Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000052:1:2:0:0] -> [80000052:2:2:0:0] 2025-07-08T12:01:34.975156Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000062:1:2:0:0] -> [80000062:2:2:0:0] 2025-07-08T12:01:34.975162Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000072:1:2:0:0] -> [80000072:2:2:0:0] 2025-07-08T12:01:34.975213Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-07-08T12:01:34.975226Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000010:2:1:1:0] -> [80000010:3:1:1:0] 2025-07-08T12:01:34.975233Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000040:2:1:1:0] -> [80000040:3:1:1:0] 2025-07-08T12:01:34.975239Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000070:2:1:1:0] -> [80000070:3:1:1:0] 2025-07-08T12:01:34.975246Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000001:2:1:2:0] -> [80000001:3:1:2:0] 2025-07-08T12:01:34.975251Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000021:2:1:2:0] -> [80000021:3:1:2:0] 2025-07-08T12:01:34.975257Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000031:2:1:2:0] -> [80000031:3:1:2:0] 2025-07-08T12:01:34.975265Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000051:2:1:2:0] -> [80000051:3:1:2:0] 2025-07-08T12:01:34.975271Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000061:2:1:2:0] -> [80000061:3:1:2:0] 2025-07-08T12:01:34.976135Z 10 01h25m01.343560s :BS_NODE DEBUG: [10] VDiskId# [80000040:3:0:0:0] status changed to REPLICATING 2025-07-08T12:01:34.976229Z 5 01h25m01.371560s :BS_NODE DEBUG: [5] VDiskId# [80000052:2:0:2:0] status changed to REPLICATING 2025-07-08T12:01:34.976305Z 7 01h25m01.414560s :BS_NODE DEBUG: [7] VDiskId# [80000031:3:0:1:0] status changed to REPLICATING 2025-07-08T12:01:34.976398Z 4 01h25m01.456560s :BS_NODE DEBUG: [4] VDiskId# [80000022:2:0:2:0] status changed to REPLICATING 2025-07-08T12:01:34.976466Z 4 01h25m01.493560s :BS_NODE DEBUG: [4] VDiskId# [80000002:2:0:2:0] status changed to REPLICATING 2025-07-08T12:01:34.976518Z 4 01h25m01.780560s :BS_NODE DEBUG: [4] VDiskId# [80000032:2:0:2:0] status changed to REPLICATING 2025-07-08T12:01:34.976572Z 10 01h25m02.008560s :BS_NODE DEBUG: [10] VDiskId# [80000010:3:0:0:0] status changed to REPLICATING 2025-07-08T12:01:34.976629Z 2 01h25m02.190560s :BS_NODE DEBUG: [2] VDiskId# [80000042:2:0:2:0] status changed to REPLICATING 2025-07-08T12:01:34.976702Z 10 01h25m03.054560s :BS_NODE DEBUG: [10] VDiskId# [80000070:3:0:0:0] status changed to REPLICATING 2025-07-08T12:01:34.976758Z 7 01h25m03.110560s :BS_NODE DEBUG: [7] VDiskId# [80000051:3:0:1:0] status changed to REPLICATING 2025-07-08T12:01:34.976810Z 5 01h25m03.731560s :BS_NODE DEBUG: [5] VDiskId# [80000072:2:0:2:0] status changed to REPLICATING 
2025-07-08T12:01:34.976861Z 7 01h25m03.910560s :BS_NODE DEBUG: [7] VDiskId# [80000001:3:0:1:0] status changed to REPLICATING 2025-07-08T12:01:34.976920Z 2 01h25m04.033560s :BS_NODE DEBUG: [2] VDiskId# [80000062:2:0:2:0] status changed to REPLICATING 2025-07-08T12:01:34.977003Z 8 01h25m04.137560s :BS_NODE DEBUG: [8] VDiskId# [80000061:3:0:1:0] status changed to REPLICATING 2025-07-08T12:01:34.977077Z 4 01h25m04.651560s :BS_NODE DEBUG: [4] VDiskId# [80000012:2:0:2:0] status changed to REPLICATING 2025-07-08T12:01:34.977698Z 7 01h25m05.800560s :BS_NODE DEBUG: [7] VDiskId# [80000021:3:0:1:0] status changed to REPLICATING 2025-07-08T12:01:34.978128Z 5 01h25m15.529560s :BS_NODE DEBUG: [5] VDiskId# [80000052:2:0:2:0] status changed to READY 2025-07-08T12:01:34.980558Z 1 01h25m15.530072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-07-08T12:01:34.980578Z 1 01h25m15.530072s :BS_NODE DEBUG: [1] VDiskId# [80000052:1:0:2:0] destroyed 2025-07-08T12:01:34.980637Z 2 01h25m16.686560s :BS_NODE DEBUG: [2] VDiskId# [80000042:2:0:2:0] status changed to READY 2025-07-08T12:01:34.982390Z 1 01h25m16.687072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-07-08T12:01:34.982410Z 1 01h25m16.687072s :BS_NODE DEBUG: [1] VDiskId# [80000042:1:0:2:0] destroyed 2025-07-08T12:01:34.982453Z 10 01h25m17.462560s :BS_NODE DEBUG: [10] VDiskId# [80000070:3:0:0:0] status changed to READY 2025-07-08T12:01:34.984032Z 1 01h25m17.463072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-07-08T12:01:34.984045Z 1 01h25m17.463072s :BS_NODE DEBUG: [1] VDiskId# [80000070:2:0:0:0] destroyed 2025-07-08T12:01:34.984137Z 2 01h25m20.624560s :BS_NODE DEBUG: [2] VDiskId# [80000062:2:0:2:0] status changed to READY 2025-07-08T12:01:34.985720Z 1 01h25m20.625072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-07-08T12:01:34.985738Z 1 01h25m20.625072s :BS_NODE DEBUG: [1] VDiskId# [80000062:1:0:2:0] destroyed 2025-07-08T12:01:34.985771Z 4 01h25m20.873560s :BS_NODE DEBUG: [4] VDiskId# [80000032:2:0:2:0] status changed to READY 2025-07-08T12:01:34.987479Z 1 01h25m20.874072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-07-08T12:01:34.987497Z 1 01h25m20.874072s :BS_NODE DEBUG: [1] VDiskId# [80000032:1:0:2:0] destroyed 2025-07-08T12:01:34.987533Z 7 01h25m21.316560s :BS_NODE DEBUG: [7] VDiskId# [80000001:3:0:1:0] status changed to READY 2025-07-08T12:01:34.989139Z 1 01h25m21.317072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-07-08T12:01:35.078676Z 1 01h25m21.317072s :BS_NODE DEBUG: [1] VDiskId# [80000001:2:0:1:0] destroyed 2025-07-08T12:01:35.078784Z 7 01h25m21.837560s :BS_NODE DEBUG: [7] VDiskId# [80000051:3:0:1:0] status changed to READY 2025-07-08T12:01:35.081492Z 1 01h25m21.838072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-07-08T12:01:35.081519Z 1 01h25m21.838072s :BS_NODE DEBUG: [1] VDiskId# [80000051:2:0:1:0] destroyed 2025-07-08T12:01:35.081578Z 4 01h25m23.123560s :BS_NODE DEBUG: [4] VDiskId# [80000002:2:0:2:0] status changed to READY 2025-07-08T12:01:35.083440Z 1 01h25m23.124072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-07-08T12:01:35.083460Z 1 01h25m23.124072s :BS_NODE DEBUG: [1] VDiskId# [80000002:1:0:2:0] destroyed 2025-07-08T12:01:35.083499Z 4 01h25m23.155560s :BS_NODE DEBUG: [4] VDiskId# [80000012:2:0:2:0] status changed to READY 2025-07-08T12:01:35.087471Z 1 01h25m23.156072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-07-08T12:01:35.087492Z 1 01h25m23.156072s :BS_NODE DEBUG: [1] VDiskId# [80000012:1:0:2:0] destroyed 2025-07-08T12:01:35.087537Z 10 01h25m24.380560s :BS_NODE DEBUG: [10] VDiskId# [80000010:3:0:0:0] status changed to 
READY 2025-07-08T12:01:35.089262Z 1 01h25m24.381072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-07-08T12:01:35.089283Z 1 01h25m24.381072s :BS_NODE DEBUG: [1] VDiskId# [80000010:2:0:0:0] destroyed 2025-07-08T12:01:35.089324Z 8 01h25m24.582560s :BS_NODE DEBUG: [8] VDiskId# [80000061:3:0:1:0] status changed to READY 2025-07-08T12:01:35.091198Z 1 01h25m24.583072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-07-08T12:01:35.091220Z 1 01h25m24.583072s :BS_NODE DEBUG: [1] VDiskId# [80000061:2:0:1:0] destroyed 2025-07-08T12:01:35.091596Z 4 01h25m30.089560s :BS_NODE DEBUG: [4] VDiskId# [80000022:2:0:2:0] status changed to READY 2025-07-08T12:01:35.093667Z 1 01h25m30.090072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-07-08T12:01:35.093686Z 1 01h25m30.090072s :BS_NODE DEBUG: [1] VDiskId# [80000022:1:0:2:0] destroyed 2025-07-08T12:01:35.093724Z 10 01h25m30.482560s :BS_NODE DEBUG: [10] VDiskId# [80000040:3:0:0:0] status changed to READY 2025-07-08T12:01:35.095436Z 1 01h25m30.483072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-07-08T12:01:35.095453Z 1 01h25m30.483072s :BS_NODE DEBUG: [1] VDiskId# [80000040:2:0:0:0] destroyed 2025-07-08T12:01:35.095569Z 7 01h25m31.540560s :BS_NODE DEBUG: [7] VDiskId# [80000031:3:0:1:0] status changed to READY 2025-07-08T12:01:35.099889Z 1 01h25m31.541072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-07-08T12:01:35.099910Z 1 01h25m31.541072s :BS_NODE DEBUG: [1] VDiskId# [80000031:2:0:1:0] destroyed 2025-07-08T12:01:35.100142Z 7 01h25m35.108560s :BS_NODE DEBUG: [7] VDiskId# [80000021:3:0:1:0] status changed to READY 2025-07-08T12:01:35.101877Z 1 01h25m35.109072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-07-08T12:01:35.101893Z 1 01h25m35.109072s :BS_NODE DEBUG: [1] VDiskId# [80000021:2:0:1:0] destroyed 2025-07-08T12:01:35.102133Z 5 01h25m38.442560s :BS_NODE DEBUG: [5] VDiskId# [80000072:2:0:2:0] status changed to READY 2025-07-08T12:01:35.103747Z 1 01h25m38.443072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-07-08T12:01:35.103761Z 1 01h25m38.443072s :BS_NODE DEBUG: [1] VDiskId# [80000072:1:0:2:0] destroyed >> TPDiskTest::RecreateWithInvalidPDiskKey [GOOD] >> TPDiskTest::PDiskIncreaseLogChunksLimitAfterRestart >> TExportToS3WithRebootsTests::ShouldDisableAutoDropping [GOOD] |69.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::WriteThenReadWithCommit [GOOD] Test command err: Trying to start YDB, gRPC: 9810, MsgBus: 21084 2025-07-08T12:01:33.495337Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524680025062835010:2071];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:33.495706Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b5b/r3tmp/tmpcsdXwR/pdisk_1.dat TServer::EnableGrpc on GrpcPort 9810, node 1 2025-07-08T12:01:33.557265Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:33.562112Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:33.562122Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:33.562124Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-07-08T12:01:33.562172Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21084 2025-07-08T12:01:33.595950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:33.595983Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:21084 2025-07-08T12:01:33.597143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:33.626007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:33.629933Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:33.640165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:33.705773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T12:01:33.765844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:33.775597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:33.858278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:33.871716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:33.928575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:33.941967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:33.965384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:33.983582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:33.998632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:34.181445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 6140, MsgBus: 4702 2025-07-08T12:01:34.652238Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524680027947804717:2090];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:34.653952Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b5b/r3tmp/tmpQwDbAP/pdisk_1.dat 2025-07-08T12:01:34.665652Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6140, node 2 2025-07-08T12:01:34.680999Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:34.681009Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:34.681011Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:34.681057Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4702 TClient is connected to server localhost:4702 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:34.755301Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:34.755328Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:34.755661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:34.756691Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:34.757133Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:34.821335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:34.844495Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:34.866467Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T12:01:34.878393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:35.004937Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:35.014404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:35.025165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:35.038740Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:35.113823Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:35.126065Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:35.139469Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:35.298279Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:35.362109Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:7524680032242774641:2446] TxId: 281474976715674. Ctx: { TraceId: 01jzmykbkmbm4r4600hfmwr2n1, Database: /Root, DatabaseId: /Root, SessionId: ydb://sessi ... [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-07-08T12:01:35.393877Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:7524680032242774683:2446] TxId: 281474976715676. Ctx: { TraceId: 01jzmykbmj8affbscp1x382dgf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTE4Y2I1ZGMtNTFhOGJmMDMtOGE2MDE1MDEtNjc0NDcxMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (return (lambda '($1) $1)) ) 2025-07-08T12:01:35.393937Z node 2 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-07-08T12:01:35.393976Z node 2 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976715676. Shard resolve complete, resolved shards: 1 2025-07-08T12:01:35.393986Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:7524680032242774683:2446] TxId: 281474976715676. Ctx: { TraceId: 01jzmykbmj8affbscp1x382dgf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTE4Y2I1ZGMtNTFhOGJmMDMtOGE2MDE1MDEtNjc0NDcxMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Shards nodes resolved, success: 1, failed: 0 2025-07-08T12:01:35.393991Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:7524680032242774683:2446] TxId: 281474976715676. 
Ctx: { TraceId: 01jzmykbmj8affbscp1x382dgf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTE4Y2I1ZGMtNTFhOGJmMDMtOGE2MDE1MDEtNjc0NDcxMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Shards on nodes: node 2: [72075186224037922] 2025-07-08T12:01:35.394004Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715676. Ctx: { TraceId: 01jzmykbmj8affbscp1x382dgf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTE4Y2I1ZGMtNTFhOGJmMDMtOGE2MDE1MDEtNjc0NDcxMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, localComputeTasks: 0, snapshot: {18446744073709551615, 1751976095394} 2025-07-08T12:01:35.394081Z node 2 :KQP_EXECUTER INFO: ActorId: [2:7524680032242774683:2446] TxId: 281474976715676. Ctx: { TraceId: 01jzmykbmj8affbscp1x382dgf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTE4Y2I1ZGMtNTFhOGJmMDMtOGE2MDE1MDEtNjc0NDcxMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Total tasks: 1, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-07-08T12:01:35.394091Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:7524680032242774683:2446] TxId: 281474976715676. Ctx: { TraceId: 01jzmykbmj8affbscp1x382dgf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTE4Y2I1ZGMtNTFhOGJmMDMtOGE2MDE1MDEtNjc0NDcxMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Waiting for: CA [2:7524680032242774687:2446], 2025-07-08T12:01:35.394095Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:7524680032242774683:2446] TxId: 281474976715676. Ctx: { TraceId: 01jzmykbmj8affbscp1x382dgf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTE4Y2I1ZGMtNTFhOGJmMDMtOGE2MDE1MDEtNjc0NDcxMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:7524680032242774687:2446], 2025-07-08T12:01:35.394097Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:7524680032242774683:2446] TxId: 281474976715676. Ctx: { TraceId: 01jzmykbmj8affbscp1x382dgf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTE4Y2I1ZGMtNTFhOGJmMDMtOGE2MDE1MDEtNjc0NDcxMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-07-08T12:01:35.394198Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:7524680032242774683:2446] TxId: 281474976715676. Ctx: { TraceId: 01jzmykbmj8affbscp1x382dgf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTE4Y2I1ZGMtNTFhOGJmMDMtOGE2MDE1MDEtNjc0NDcxMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: ExecuteState, got execution state from compute actor: [2:7524680032242774687:2446], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-07-08T12:01:35.394218Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:7524680032242774683:2446] TxId: 281474976715676. Ctx: { TraceId: 01jzmykbmj8affbscp1x382dgf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTE4Y2I1ZGMtNTFhOGJmMDMtOGE2MDE1MDEtNjc0NDcxMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Waiting for: CA [2:7524680032242774687:2446], 2025-07-08T12:01:35.394221Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:7524680032242774683:2446] TxId: 281474976715676. 
Ctx: { TraceId: 01jzmykbmj8affbscp1x382dgf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTE4Y2I1ZGMtNTFhOGJmMDMtOGE2MDE1MDEtNjc0NDcxMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:7524680032242774687:2446], 2025-07-08T12:01:35.394774Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:7524680032242774683:2446] TxId: 281474976715676. Ctx: { TraceId: 01jzmykbmj8affbscp1x382dgf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTE4Y2I1ZGMtNTFhOGJmMDMtOGE2MDE1MDEtNjc0NDcxMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: ExecuteState, got execution state from compute actor: [2:7524680032242774687:2446], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 227 Tasks { TaskId: 1 CpuTimeUs: 45 FinishTimeMs: 1751976095394 OutputRows: 1 OutputBytes: 22 Tables { TablePath: "/Root/TestImmediateEffects" ReadRows: 1 ReadBytes: 22 AffectedPartitions: 1 } IngressRows: 1 ResultRows: 1 ResultBytes: 22 ComputeCpuTimeUs: 15 BuildCpuTimeUs: 30 HostName: "ghrun-3z2hjo4icm" NodeId: 2 StartTimeMs: 1751976095394 CreateTimeMs: 1751976095394 UpdateTimeMs: 1751976095394 } MaxMemoryUsage: 1048576 } 2025-07-08T12:01:35.394798Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715676. Ctx: { TraceId: 01jzmykbmj8affbscp1x382dgf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTE4Y2I1ZGMtNTFhOGJmMDMtOGE2MDE1MDEtNjc0NDcxMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Compute actor has finished execution: [2:7524680032242774687:2446] 2025-07-08T12:01:35.394846Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:7524680032242774683:2446] TxId: 281474976715676. Ctx: { TraceId: 01jzmykbmj8affbscp1x382dgf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTE4Y2I1ZGMtNTFhOGJmMDMtOGE2MDE1MDEtNjc0NDcxMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. terminate execution. 2025-07-08T12:01:35.394861Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:7524680032242774683:2446] TxId: 281474976715676. Ctx: { TraceId: 01jzmykbmj8affbscp1x382dgf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTE4Y2I1ZGMtNTFhOGJmMDMtOGE2MDE1MDEtNjc0NDcxMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Resource usage for last stat interval: ComputeTime: 0.000227s ReadRows: 1 ReadBytes: 22 ru: 1 rate limiter was not found force flag: 1 2025-07-08T12:01:35.394974Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715677. Resolved key sets: 0 2025-07-08T12:01:35.395001Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715677. Ctx: { TraceId: 01jzmykbmj8affbscp1x382dgf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTE4Y2I1ZGMtNTFhOGJmMDMtOGE2MDE1MDEtNjc0NDcxMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 1, snapshot: {18446744073709551615, 1751976095394} 2025-07-08T12:01:35.395037Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:7524680032242774690:2446] TxId: 281474976715677. Ctx: { TraceId: 01jzmykbmj8affbscp1x382dgf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTE4Y2I1ZGMtNTFhOGJmMDMtOGE2MDE1MDEtNjc0NDcxMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
State: WaitResolveState, Executing KQP transaction on shard: 72075186224037922, tasks: [], lockTxId: (empty maybe), locks: Locks { LockId: 281474976715674 DataShard: 72075186224037922 Generation: 1 Counter: 1 SchemeShard: 72057594046644480 PathId: 13 HasWrites: true } SendingShards: 72075186224037922 ReceivingShards: 72075186224037922 Op: Commit, immediate: 1 2025-07-08T12:01:35.395058Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:7524680032242774690:2446] TxId: 281474976715677. Ctx: { TraceId: 01jzmykbmj8affbscp1x382dgf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTE4Y2I1ZGMtNTFhOGJmMDMtOGE2MDE1MDEtNjc0NDcxMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ExecuteDatashardTransaction traceId.verbosity: 0 2025-07-08T12:01:35.395071Z node 2 :KQP_EXECUTER INFO: ActorId: [2:7524680032242774690:2446] TxId: 281474976715677. Ctx: { TraceId: 01jzmykbmj8affbscp1x382dgf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTE4Y2I1ZGMtNTFhOGJmMDMtOGE2MDE1MDEtNjc0NDcxMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Total tasks: 0, readonly: 0, datashardTxs: 1, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-07-08T12:01:35.395079Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:7524680032242774690:2446] TxId: 281474976715677. Ctx: { TraceId: 01jzmykbmj8affbscp1x382dgf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTE4Y2I1ZGMtNTFhOGJmMDMtOGE2MDE1MDEtNjc0NDcxMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: WaitResolveState, datashard 72075186224037922 not finished yet: Executing 2025-07-08T12:01:35.395084Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:7524680032242774690:2446] TxId: 281474976715677. Ctx: { TraceId: 01jzmykbmj8affbscp1x382dgf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTE4Y2I1ZGMtNTFhOGJmMDMtOGE2MDE1MDEtNjc0NDcxMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: WaitResolveState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037922 (Executing), 2025-07-08T12:01:35.395087Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:7524680032242774690:2446] TxId: 281474976715677. Ctx: { TraceId: 01jzmykbmj8affbscp1x382dgf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTE4Y2I1ZGMtNTFhOGJmMDMtOGE2MDE1MDEtNjc0NDcxMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-07-08T12:01:35.396451Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:7524680032242774690:2446] TxId: 281474976715677. Ctx: { TraceId: 01jzmykbmj8affbscp1x382dgf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTE4Y2I1ZGMtNTFhOGJmMDMtOGE2MDE1MDEtNjc0NDcxMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Got propose result, shard: 72075186224037922, status: COMPLETE, error: 2025-07-08T12:01:35.396483Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:7524680032242774690:2446] TxId: 281474976715677. Ctx: { TraceId: 01jzmykbmj8affbscp1x382dgf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTE4Y2I1ZGMtNTFhOGJmMDMtOGE2MDE1MDEtNjc0NDcxMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. terminate execution. 2025-07-08T12:01:35.396490Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:7524680032242774690:2446] TxId: 281474976715677. 
Ctx: { TraceId: 01jzmykbmj8affbscp1x382dgf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTE4Y2I1ZGMtNTFhOGJmMDMtOGE2MDE1MDEtNjc0NDcxMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 >> KqpEffects::InsertAbort_Literal_Duplicates+UseSink >> KikimrIcGateway::TestCreateSameExternalTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpdateOn [GOOD] Test command err: Trying to start YDB, gRPC: 28597, MsgBus: 10755 2025-07-08T12:01:33.312124Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524680024438307741:2072];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:33.313762Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b5c/r3tmp/tmp8YGtvN/pdisk_1.dat 2025-07-08T12:01:33.374076Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28597, node 1 2025-07-08T12:01:33.413413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:33.413440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:33.414484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:33.428714Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:33.428726Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:33.428728Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:33.428766Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10755 TClient is connected to server localhost:10755 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:33.508515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:33.516550Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:33.537782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:33.603433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:33.622060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:33.641857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:33.736826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:33.753883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:33.764761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:33.779261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:33.792999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:33.810534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:33.829236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:34.023383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22036, MsgBus: 14027 2025-07-08T12:01:34.416495Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524680029526349681:2073];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:34.417534Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b5c/r3tmp/tmpa6ixdi/pdisk_1.dat 2025-07-08T12:01:34.437831Z node 2 :IMPORT WARN: Table profiles were not 
loaded TServer::EnableGrpc on GrpcPort 22036, node 2 2025-07-08T12:01:34.449439Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:34.449449Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:34.449451Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:34.449496Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14027 TClient is connected to server localhost:14027 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:34.516660Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:34.516689Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:34.517681Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:34.519309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:34.520983Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:34.624234Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:34.635322Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:34.658488Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:34.673389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:34.758088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:34.768135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:34.780426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:34.835977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:34.849121Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:34.866648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:34.878022Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:35.082384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowWithValueCast-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 6426, MsgBus: 25549 2025-07-08T12:01:33.400387Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524680027054980102:2223];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:33.400479Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b5d/r3tmp/tmp1vrR4f/pdisk_1.dat 2025-07-08T12:01:33.488271Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6426, node 1 2025-07-08T12:01:33.511372Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:33.511384Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:33.511386Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:33.511431Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25549 TClient is connected to server localhost:25549 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-07-08T12:01:33.558852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:33.558883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:33.561394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:33.563039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:33.569467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:33.596468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-07-08T12:01:33.629951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:33.646411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:33.790974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:33.801028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:33.813201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:33.828402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:33.844905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:33.904160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:33.916273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:34.128470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20969, MsgBus: 28236 2025-07-08T12:01:34.522899Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524680029302887595:2070];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:34.522928Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b5d/r3tmp/tmpGHoTJ3/pdisk_1.dat 2025-07-08T12:01:34.540208Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20969, node 2 2025-07-08T12:01:34.553201Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:34.553213Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:34.553215Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:34.553263Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28236 TClient is connected to server localhost:28236 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:34.620318Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:34.620347Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:34.620688Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:34.626785Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:34.627016Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:34.653932Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:34.725772Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T12:01:34.765613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:34.785963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:34.914220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:34.922972Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:34.934241Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:34.949234Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:34.958700Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:34.972176Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:35.037003Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:35.199635Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 >> KqpEffects::InsertAbort_Select_Conflict-UseSink [GOOD] >> BsControllerTest::SelfHealMirror3dc [GOOD] >> KikimrIcGateway::TestLoadExternalTable >> BasicUsage::WaitEventBlocksBeforeDiscovery [GOOD] >> BasicUsage::SimpleHandlers >> KikimrIcGateway::TestCreateSameExternalTable [GOOD] >> KikimrIcGateway::TestDropExternalTable >> TPDiskTest::PDiskIncreaseLogChunksLimitAfterRestart [GOOD] >> TPDiskTest::AllRequestsAreAnsweredOnPDiskRestart ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::SelfHealMirror3dc [GOOD] Test command err: 2025-07-08T12:01:32.875931Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2025-07-08T12:01:32.875951Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2025-07-08T12:01:32.875969Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2025-07-08T12:01:32.875973Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2025-07-08T12:01:32.875980Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2025-07-08T12:01:32.875983Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2025-07-08T12:01:32.875990Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2025-07-08T12:01:32.875992Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2025-07-08T12:01:32.875996Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2025-07-08T12:01:32.875999Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2025-07-08T12:01:32.876002Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2025-07-08T12:01:32.876005Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2025-07-08T12:01:32.876008Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2025-07-08T12:01:32.876010Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2025-07-08T12:01:32.876014Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 
2025-07-08T12:01:32.876016Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2025-07-08T12:01:32.876020Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2025-07-08T12:01:32.876022Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2025-07-08T12:01:32.876026Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2025-07-08T12:01:32.876028Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2025-07-08T12:01:32.876032Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2025-07-08T12:01:32.876034Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2025-07-08T12:01:32.876037Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2025-07-08T12:01:32.876040Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2025-07-08T12:01:32.876044Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2025-07-08T12:01:32.876046Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2025-07-08T12:01:32.876049Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2025-07-08T12:01:32.876052Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2025-07-08T12:01:32.876059Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2025-07-08T12:01:32.876061Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2025-07-08T12:01:32.876065Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2025-07-08T12:01:32.876067Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2025-07-08T12:01:32.876071Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2025-07-08T12:01:32.876073Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2025-07-08T12:01:32.876077Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2025-07-08T12:01:32.876079Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2025-07-08T12:01:32.876083Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2025-07-08T12:01:32.876086Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2025-07-08T12:01:32.876089Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2025-07-08T12:01:32.876092Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2025-07-08T12:01:32.876095Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2025-07-08T12:01:32.876097Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2025-07-08T12:01:32.876101Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2025-07-08T12:01:32.876103Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2025-07-08T12:01:32.876107Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2025-07-08T12:01:32.876109Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2025-07-08T12:01:32.876113Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2025-07-08T12:01:32.876118Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2025-07-08T12:01:32.876124Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2025-07-08T12:01:32.876126Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2025-07-08T12:01:32.876129Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2025-07-08T12:01:32.876132Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2025-07-08T12:01:32.876135Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2025-07-08T12:01:32.876138Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2025-07-08T12:01:32.876145Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2025-07-08T12:01:32.876148Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2025-07-08T12:01:32.876151Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2025-07-08T12:01:32.876153Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2025-07-08T12:01:32.876157Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2025-07-08T12:01:32.876159Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2025-07-08T12:01:32.876162Z 31 00h00m00.000000s 
:BS_NODE DEBUG: [31] Bootstrap 2025-07-08T12:01:32.876165Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2025-07-08T12:01:32.876168Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2025-07-08T12:01:32.876171Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2025-07-08T12:01:32.876174Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2025-07-08T12:01:32.876176Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2025-07-08T12:01:32.876179Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2025-07-08T12:01:32.876182Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2025-07-08T12:01:32.876185Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2025-07-08T12:01:32.876187Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2025-07-08T12:01:32.876193Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2025-07-08T12:01:32.876195Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2025-07-08T12:01:32.878393Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2713:53] Status# ERROR ClientId# [1:2713:53] ServerId# [0:0:0] PipeClient# [1:2713:53] 2025-07-08T12:01:32.878624Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2714:41] Status# ERROR ClientId# [2:2714:41] ServerId# [0:0:0] PipeClient# [2:2714:41] 2025-07-08T12:01:32.878635Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2715:41] Status# ERROR ClientId# [3:2715:41] ServerId# [0:0:0] PipeClient# [3:2715:41] 2025-07-08T12:01:32.878641Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2716:41] Status# ERROR ClientId# [4:2716:41] ServerId# [0:0:0] PipeClient# [4:2716:41] 2025-07-08T12:01:32.878648Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2717:41] Status# ERROR ClientId# [5:2717:41] ServerId# [0:0:0] PipeClient# [5:2717:41] 2025-07-08T12:01:32.878654Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2718:41] Status# ERROR ClientId# [6:2718:41] ServerId# [0:0:0] PipeClient# [6:2718:41] 2025-07-08T12:01:32.878661Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2719:41] Status# ERROR ClientId# [7:2719:41] ServerId# [0:0:0] PipeClient# [7:2719:41] 2025-07-08T12:01:32.878667Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2720:41] Status# ERROR ClientId# [8:2720:41] ServerId# [0:0:0] PipeClient# [8:2720:41] 2025-07-08T12:01:32.878673Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2721:41] Status# ERROR ClientId# [9:2721:41] ServerId# [0:0:0] PipeClient# [9:2721:41] 2025-07-08T12:01:32.878679Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2722:41] Status# ERROR ClientId# [10:2722:41] ServerId# [0:0:0] PipeClient# [10:2722:41] 2025-07-08T12:01:32.878685Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2723:41] Status# ERROR ClientId# [11:2723:41] ServerId# [0:0:0] PipeClient# [11:2723:41] 2025-07-08T12:01:32.878691Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2724:41] Status# ERROR ClientId# [12:2724:41] ServerId# [0:0:0] PipeClient# [12:2724:41] 2025-07-08T12:01:32.878698Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2725:41] Status# ERROR ClientId# [13:2725:41] ServerId# [0:0:0] PipeClient# [13:2725:41] 2025-07-08T12:01:32.878704Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2726:41] Status# ERROR ClientId# [14:2726:41] ServerId# [0:0:0] PipeClient# [14:2726:41] 2025-07-08T12:01:32.878714Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2727:41] 
Status# ERROR ClientId# [15:2727:41] ServerId# [0:0:0] PipeClient# [15:2727:41] 2025-07-08T12:01:32.878720Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2728:41] Status# ERROR ClientId# [16:2728:41] ServerId# [0:0:0] PipeClient# [16:2728:41] 2025-07-08T12:01:32.878726Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2729:41] Status# ERROR ClientId# [17:2729:41] ServerId# [0:0:0] PipeClient# [17:2729:41] 2025-07-08T12:01:32.878732Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2730:41] Status# ERROR ClientId# [18:2730:41] ServerId# [0:0:0] PipeClient# [18:2730:41] 2025-07-08T12:01:32.878738Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2731:41] Status# ERROR ClientId# [19:2731:41] ServerId# [0:0:0] PipeClient# [19:2731:41] 2025-07-08T12:01:32.878745Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2732:41] Status# ERROR ClientId# [20:2732:41] ServerId# [0:0:0] PipeClient# [20:2732:41] 2025-07-08T12:01:32.878750Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2733:41] Status# ERROR ClientId# [21:2733:41] ServerId# [0:0:0] PipeClient# [21:2733:41] 2025-07-08T12:01:32.878757Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2734:41] Status# ERROR ClientId# [22:2734:41] ServerId# [0:0:0] PipeClient# [22:2734:41] 2025-07-08T12:01:32.878763Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2735:41] Status# ERROR ClientId# [23:2735:41] ServerId# [0:0:0] PipeClient# [23:2735:41] 2025-07-08T12:01:32.878769Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2736:41] Status# ERROR ClientId# [24:2736:41] ServerId# [0:0:0] PipeClient# [24:2736:41] 2025-07-08T12:01:32.878776Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2737:41] Status# ERROR ClientId# [25:2737:41] ServerId# [0:0:0] PipeClient# [25:2737:41] 2025-07-08T12:01:32.878782Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2738:41] Status# ERROR ClientId# [26:2738:41] ServerId# [0:0:0] PipeClient# [26:2738:41] 2025-07-08T12:01:32.878788Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2739:41] Status# ERROR ClientId# [27:2739:41] ServerId# [0:0:0] PipeClient# [27:2739:41] 2025-07-08T12:01:32.878795Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2740:41] Status# ERROR ClientId# [28:2740:41] ServerId# [0:0:0] PipeClient# [28:2740:41] 2025-07-08T12:01:32.878801Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2741:41] Status# ERROR ClientId# [29:2741:41] ServerId# [0:0:0] PipeClient# [29:2741:41] 2025-07-08T12:01:32.878807Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2742:41] Status# ERROR ClientId# [30:2742:41] ServerId# [0:0:0] PipeClient# [30:2742:41] 2025-07-08T12:01:32.878813Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2743:41] Status# ERROR ClientId# [31:2743:41] ServerId# [0:0:0] PipeClient# [31:2743:41] 2025-07-08T12:01:32.878820Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2744:41] Status# ERROR ClientId# [32:2744:41] ServerId# [0:0:0] PipeClient# [32:2744:41] 2025-07-08T12:01:32.878826Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2745:41] Status# ERROR ClientId# [33:2745:41] ServerId# [0:0:0] PipeClient# [33:2745:41] 2025-07-08T12:01:32.878832Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2746:41] Status# ERROR 
ClientId# [34:2746:41] ServerId# [0:0:0] PipeClient# [34:2746:41] 2025-07-08T12:01:32.878838Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2747:41] Status# ERROR ClientId# [35:2747:41 ... [80000014:2:2:1:0] DiskIsOk# true 2025-07-08T12:01:36.056606Z 1 05h45m00.121504s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483668 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:36.056614Z 1 05h45m00.121504s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483668 VDiskId# [80000014:2:2:2:0] DiskIsOk# true 2025-07-08T12:01:36.060740Z 1 05h45m00.122016s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-07-08T12:01:36.060771Z 1 05h45m00.122016s :BS_NODE DEBUG: [1] VDiskId# [80000014:2:0:0:0] -> [80000014:3:0:0:0] 2025-07-08T12:01:36.060994Z 1 05h45m00.122016s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:206} Reassigner succeeded GroupId# 2147483668 Items# [80000014:2:1:1:0]: 16:1003:1001 -> 23:1001:1014 ConfigTxSeqNo# 538 2025-07-08T12:01:36.061005Z 1 05h45m00.122016s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:217} Reassigner finished GroupId# 2147483668 Success# true 2025-07-08T12:01:36.061051Z 36 05h45m00.122016s :BS_NODE DEBUG: [36] NodeServiceSetUpdate 2025-07-08T12:01:36.061061Z 36 05h45m00.122016s :BS_NODE DEBUG: [36] VDiskId# [80000014:2:2:1:0] -> [80000014:3:2:1:0] 2025-07-08T12:01:36.061076Z 19 05h45m00.122016s :BS_NODE DEBUG: [19] NodeServiceSetUpdate 2025-07-08T12:01:36.061084Z 19 05h45m00.122016s :BS_NODE DEBUG: [19] VDiskId# [80000014:2:1:2:0] -> [80000014:3:1:2:0] 2025-07-08T12:01:36.061099Z 4 05h45m00.122016s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2025-07-08T12:01:36.061108Z 4 05h45m00.122016s :BS_NODE DEBUG: [4] VDiskId# [80000014:2:0:1:0] -> [80000014:3:0:1:0] 2025-07-08T12:01:36.061123Z 23 05h45m00.122016s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2025-07-08T12:01:36.061133Z 23 05h45m00.122016s :BS_NODE DEBUG: [23] VDiskId# [80000014:3:1:1:0] PDiskId# 1001 VSlotId# 1014 created 2025-07-08T12:01:36.061154Z 23 05h45m00.122016s :BS_NODE DEBUG: [23] VDiskId# [80000014:3:1:1:0] status changed to INIT_PENDING 2025-07-08T12:01:36.061170Z 7 05h45m00.122016s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2025-07-08T12:01:36.061179Z 7 05h45m00.122016s :BS_NODE DEBUG: [7] VDiskId# [80000014:2:0:2:0] -> [80000014:3:0:2:0] 2025-07-08T12:01:36.061193Z 25 05h45m00.122016s :BS_NODE DEBUG: [25] NodeServiceSetUpdate 2025-07-08T12:01:36.061201Z 25 05h45m00.122016s :BS_NODE DEBUG: [25] VDiskId# [80000014:2:2:0:0] -> [80000014:3:2:0:0] 2025-07-08T12:01:36.061215Z 13 05h45m00.122016s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2025-07-08T12:01:36.061223Z 13 05h45m00.122016s :BS_NODE DEBUG: [13] VDiskId# [80000014:2:1:0:0] -> [80000014:3:1:0:0] 2025-07-08T12:01:36.061237Z 31 05h45m00.122016s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2025-07-08T12:01:36.061245Z 31 05h45m00.122016s :BS_NODE DEBUG: [31] VDiskId# [80000014:2:2:2:0] -> [80000014:3:2:2:0] 2025-07-08T12:01:36.061258Z 16 05h45m00.122016s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-07-08T12:01:36.061359Z 1 05h45m00.122016s :BS_SELFHEAL DEBUG: {BSSH01@self_heal.cpp:71} Reassigner starting GroupId# 2147483652 2025-07-08T12:01:36.061528Z 1 05h45m00.122016s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483652 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:36.061538Z 1 05h45m00.122016s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483652 VDiskId# 
[80000004:2:0:0:0] DiskIsOk# true 2025-07-08T12:01:36.061609Z 1 05h45m00.122016s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483652 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:36.061615Z 1 05h45m00.122016s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483652 VDiskId# [80000004:2:0:1:0] DiskIsOk# true 2025-07-08T12:01:36.061621Z 1 05h45m00.122016s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483652 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:36.061626Z 1 05h45m00.122016s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483652 VDiskId# [80000004:2:0:2:0] DiskIsOk# true 2025-07-08T12:01:36.061631Z 1 05h45m00.122016s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483652 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:36.061638Z 1 05h45m00.122016s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483652 VDiskId# [80000004:2:1:0:0] DiskIsOk# true 2025-07-08T12:01:36.061644Z 1 05h45m00.122016s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483652 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:36.061648Z 1 05h45m00.122016s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483652 VDiskId# [80000004:2:1:2:0] DiskIsOk# true 2025-07-08T12:01:36.061653Z 1 05h45m00.122016s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483652 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:36.061657Z 1 05h45m00.122016s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483652 VDiskId# [80000004:2:2:0:0] DiskIsOk# true 2025-07-08T12:01:36.061662Z 1 05h45m00.122016s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483652 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:36.061666Z 1 05h45m00.122016s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483652 VDiskId# [80000004:2:2:1:0] DiskIsOk# true 2025-07-08T12:01:36.061671Z 1 05h45m00.122016s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:111} Reassigner TEvVStatusResult GroupId# 2147483652 Status# OK JoinedGroup# true Replicated# true 2025-07-08T12:01:36.061676Z 1 05h45m00.122016s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:96} Reassigner ProcessVDiskReply GroupId# 2147483652 VDiskId# [80000004:2:2:2:0] DiskIsOk# true 2025-07-08T12:01:36.065638Z 1 05h45m00.122528s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-07-08T12:01:36.065666Z 1 05h45m00.122528s :BS_NODE DEBUG: [1] VDiskId# [80000004:2:0:0:0] -> [80000004:3:0:0:0] 2025-07-08T12:01:36.065855Z 1 05h45m00.122528s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:206} Reassigner succeeded GroupId# 2147483652 Items# [80000004:2:1:1:0]: 16:1003:1000 -> 23:1001:1015 ConfigTxSeqNo# 539 2025-07-08T12:01:36.065867Z 1 05h45m00.122528s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:217} Reassigner finished GroupId# 2147483652 Success# true 2025-07-08T12:01:36.065916Z 36 05h45m00.122528s :BS_NODE DEBUG: [36] NodeServiceSetUpdate 2025-07-08T12:01:36.065928Z 36 05h45m00.122528s :BS_NODE DEBUG: [36] VDiskId# [80000004:2:2:1:0] -> [80000004:3:2:1:0] 2025-07-08T12:01:36.065946Z 19 05h45m00.122528s :BS_NODE DEBUG: [19] NodeServiceSetUpdate 2025-07-08T12:01:36.065954Z 19 05h45m00.122528s 
:BS_NODE DEBUG: [19] VDiskId# [80000004:2:1:2:0] -> [80000004:3:1:2:0] 2025-07-08T12:01:36.065970Z 4 05h45m00.122528s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2025-07-08T12:01:36.065978Z 4 05h45m00.122528s :BS_NODE DEBUG: [4] VDiskId# [80000004:2:0:1:0] -> [80000004:3:0:1:0] 2025-07-08T12:01:36.065995Z 23 05h45m00.122528s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2025-07-08T12:01:36.066005Z 23 05h45m00.122528s :BS_NODE DEBUG: [23] VDiskId# [80000004:3:1:1:0] PDiskId# 1001 VSlotId# 1015 created 2025-07-08T12:01:36.066029Z 23 05h45m00.122528s :BS_NODE DEBUG: [23] VDiskId# [80000004:3:1:1:0] status changed to INIT_PENDING 2025-07-08T12:01:36.066048Z 7 05h45m00.122528s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2025-07-08T12:01:36.066058Z 7 05h45m00.122528s :BS_NODE DEBUG: [7] VDiskId# [80000004:2:0:2:0] -> [80000004:3:0:2:0] 2025-07-08T12:01:36.066074Z 25 05h45m00.122528s :BS_NODE DEBUG: [25] NodeServiceSetUpdate 2025-07-08T12:01:36.066083Z 25 05h45m00.122528s :BS_NODE DEBUG: [25] VDiskId# [80000004:2:2:0:0] -> [80000004:3:2:0:0] 2025-07-08T12:01:36.066099Z 13 05h45m00.122528s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2025-07-08T12:01:36.066108Z 13 05h45m00.122528s :BS_NODE DEBUG: [13] VDiskId# [80000004:2:1:0:0] -> [80000004:3:1:0:0] 2025-07-08T12:01:36.066123Z 31 05h45m00.122528s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2025-07-08T12:01:36.066136Z 31 05h45m00.122528s :BS_NODE DEBUG: [31] VDiskId# [80000004:2:2:2:0] -> [80000004:3:2:2:0] 2025-07-08T12:01:36.066149Z 16 05h45m00.122528s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-07-08T12:01:36.066493Z 23 05h45m01.549504s :BS_NODE DEBUG: [23] VDiskId# [80000024:3:1:1:0] status changed to REPLICATING 2025-07-08T12:01:36.066604Z 23 05h45m02.415456s :BS_NODE DEBUG: [23] VDiskId# [80000064:3:1:1:0] status changed to REPLICATING 2025-07-08T12:01:36.066715Z 23 05h45m02.974528s :BS_NODE DEBUG: [23] VDiskId# [80000004:3:1:1:0] status changed to REPLICATING 2025-07-08T12:01:36.066854Z 23 05h45m03.182016s :BS_NODE DEBUG: [23] VDiskId# [80000014:3:1:1:0] status changed to REPLICATING 2025-07-08T12:01:36.066972Z 23 05h45m04.427968s :BS_NODE DEBUG: [23] VDiskId# [80000054:3:1:1:0] status changed to REPLICATING 2025-07-08T12:01:36.067084Z 23 05h45m04.468992s :BS_NODE DEBUG: [23] VDiskId# [80000034:3:1:1:0] status changed to REPLICATING 2025-07-08T12:01:36.067202Z 23 05h45m04.590944s :BS_NODE DEBUG: [23] VDiskId# [80000074:3:1:1:0] status changed to REPLICATING 2025-07-08T12:01:36.067341Z 23 05h45m04.626480s :BS_NODE DEBUG: [23] VDiskId# [80000044:3:1:1:0] status changed to REPLICATING 2025-07-08T12:01:36.067679Z 23 05h45m13.960016s :BS_NODE DEBUG: [23] VDiskId# [80000014:3:1:1:0] status changed to READY 2025-07-08T12:01:36.069986Z 16 05h45m13.960528s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-07-08T12:01:36.070010Z 16 05h45m13.960528s :BS_NODE DEBUG: [16] VDiskId# [80000014:2:1:1:0] destroyed 2025-07-08T12:01:36.070075Z 23 05h45m14.832528s :BS_NODE DEBUG: [23] VDiskId# [80000004:3:1:1:0] status changed to READY 2025-07-08T12:01:36.072171Z 16 05h45m14.833040s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-07-08T12:01:36.072191Z 16 05h45m14.833040s :BS_NODE DEBUG: [16] VDiskId# [80000004:2:1:1:0] destroyed 2025-07-08T12:01:36.072433Z 23 05h45m15.973968s :BS_NODE DEBUG: [23] VDiskId# [80000054:3:1:1:0] status changed to READY 2025-07-08T12:01:36.074523Z 16 05h45m15.974480s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-07-08T12:01:36.074540Z 16 05h45m15.974480s :BS_NODE DEBUG: [16] VDiskId# [80000054:2:1:1:0] destroyed 2025-07-08T12:01:36.074590Z 23 
05h45m17.109992s :BS_NODE DEBUG: [23] VDiskId# [80000034:3:1:1:0] status changed to READY 2025-07-08T12:01:36.076563Z 16 05h45m17.110504s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-07-08T12:01:36.076580Z 16 05h45m17.110504s :BS_NODE DEBUG: [16] VDiskId# [80000034:2:1:1:0] destroyed 2025-07-08T12:01:36.076700Z 23 05h45m21.292456s :BS_NODE DEBUG: [23] VDiskId# [80000064:3:1:1:0] status changed to READY 2025-07-08T12:01:36.078724Z 16 05h45m21.292968s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-07-08T12:01:36.078741Z 16 05h45m21.292968s :BS_NODE DEBUG: [16] VDiskId# [80000064:2:1:1:0] destroyed 2025-07-08T12:01:36.079105Z 23 05h45m30.261944s :BS_NODE DEBUG: [23] VDiskId# [80000074:3:1:1:0] status changed to READY 2025-07-08T12:01:36.080925Z 16 05h45m30.262456s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-07-08T12:01:36.080963Z 16 05h45m30.262456s :BS_NODE DEBUG: [16] VDiskId# [80000074:2:1:1:0] destroyed 2025-07-08T12:01:36.081184Z 23 05h45m36.090504s :BS_NODE DEBUG: [23] VDiskId# [80000024:3:1:1:0] status changed to READY 2025-07-08T12:01:36.083163Z 16 05h45m36.091016s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-07-08T12:01:36.083182Z 16 05h45m36.091016s :BS_NODE DEBUG: [16] VDiskId# [80000024:2:1:1:0] destroyed 2025-07-08T12:01:36.083285Z 23 05h45m37.750480s :BS_NODE DEBUG: [23] VDiskId# [80000044:3:1:1:0] status changed to READY 2025-07-08T12:01:36.085398Z 16 05h45m37.750992s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-07-08T12:01:36.085420Z 16 05h45m37.750992s :BS_NODE DEBUG: [16] VDiskId# [80000044:2:1:1:0] destroyed ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Select_Conflict-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 62073, MsgBus: 3494 2025-07-08T12:01:33.304038Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524680025671421754:2069];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:33.304054Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b5f/r3tmp/tmpm1Sv4Q/pdisk_1.dat 2025-07-08T12:01:33.413175Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:33.414042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:33.414057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:33.417253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62073, node 1 2025-07-08T12:01:33.453160Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:33.453174Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:33.453176Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:33.453223Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3494 TClient is connected to server localhost:3494 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:33.512670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:33.514475Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-07-08T12:01:33.531589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:33.595355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-07-08T12:01:33.616862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:33.628025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:33.697918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:33.706915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:33.718279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:33.774798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:33.787355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:33.807874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:33.824595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:33.990217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:34.040691Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=3; 2025-07-08T12:01:34.042286Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-07-08T12:01:34.042332Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-07-08T12:01:34.042377Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7524680029966391660:2455], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [1:7524680025671424225:2455]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[1:7524680029966391660:2455].{
: Error: Conflict with existing key., code: 2012 } 2025-07-08T12:01:34.042476Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7524680029966391648:2455], SessionActorId: [1:7524680025671424225:2455], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:7524680025671424225:2455]. isRollback=0 2025-07-08T12:01:34.042533Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzQ4Nzk5NS0zNjY3N2Q3YS03YTFhMTA4Ny04ZTk2MzFjOQ==, ActorId: [1:7524680025671424225:2455], ActorState: ExecuteState, TraceId: 01jzmykaa05r8qcv283spqh4qd, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7524680029966391649:2455] from: [1:7524680029966391648:2455] 2025-07-08T12:01:34.042628Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7524680029966391649:2455] TxId: 281474976710672. Ctx: { TraceId: 01jzmykaa05r8qcv283spqh4qd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzQ4Nzk5NS0zNjY3N2Q3YS03YTFhMTA4Ny04ZTk2MzFjOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } 2025-07-08T12:01:34.042685Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzQ4Nzk5NS0zNjY3N2Q3YS03YTFhMTA4Ny04ZTk2MzFjOQ==, ActorId: [1:7524680025671424225:2455], ActorState: ExecuteState, TraceId: 01jzmykaa05r8qcv283spqh4qd, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 29080, MsgBus: 26155 2025-07-08T12:01:34.406195Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b5f/r3tmp/tmp1gLKuX/pdisk_1.dat 2025-07-08T12:01:34.423143Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29080, node 2 2025-07-08T12:01:34.441240Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:34.441252Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:34.441253Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:34.441307Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26155 TClient is connected to server localhost:26155 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:34.504881Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:34.504912Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting waiting... 2025-07-08T12:01:34.505645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:34.507202Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:34.550027Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:34.572990Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:34.598855Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:34.610094Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:34.744482Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:34.755404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:34.765235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:34.772931Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:34.786827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:34.801159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:34.814594Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:34.961032Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:34.999622Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=3; 2025-07-08T12:01:34.999677Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-07-08T12:01:34.999701Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-07-08T12:01:34.999753Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7524680029382284790:2455], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [2:7524680029382284651:2455]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[2:7524680029382284790:2455].{
: Error: Conflict with existing key., code: 2012 } 2025-07-08T12:01:34.999767Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7524680029382284779:2455], SessionActorId: [2:7524680029382284651:2455], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[2:7524680029382284651:2455]. isRollback=0 2025-07-08T12:01:34.999808Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2Y3NDYzMDQtNjg3NGQ3YjktYzc3ZTU2MWEtYWQ1ODhmZjE=, ActorId: [2:7524680029382284651:2455], ActorState: ExecuteState, TraceId: 01jzmykb8971zs4vhcsdbngtyy, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7524680029382284780:2455] from: [2:7524680029382284779:2455] 2025-07-08T12:01:34.999824Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7524680029382284780:2455] TxId: 281474976715672. Ctx: { TraceId: 01jzmykb8971zs4vhcsdbngtyy, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2Y3NDYzMDQtNjg3NGQ3YjktYzc3ZTU2MWEtYWQ1ODhmZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } 2025-07-08T12:01:34.999867Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2Y3NDYzMDQtNjg3NGQ3YjktYzc3ZTU2MWEtYWQ1ODhmZjE=, ActorId: [2:7524680029382284651:2455], ActorState: ExecuteState, TraceId: 01jzmykb8971zs4vhcsdbngtyy, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 17943, MsgBus: 29030 2025-07-08T12:01:35.373162Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7524680032703421567:2070];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:35.373218Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b5f/r3tmp/tmpKCvIDR/pdisk_1.dat TServer::EnableGrpc on GrpcPort 17943, node 3 2025-07-08T12:01:35.393852Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:35.394231Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:35.394237Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:35.394240Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:35.394285Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29030 TClient is connected to server localhost:29030 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:35.476088Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:35.476122Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:35.478723Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:35.478978Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:35.496642Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-07-08T12:01:35.507884Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T12:01:35.537688Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:35.552791Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:35.702662Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:35.710249Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:35.724362Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:35.738921Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:35.753990Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:35.767325Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:35.780862Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:35.925299Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:36.041954Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7524680036998391436:2472], TxId: 281474976715674, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=M2Y1YTUwNTctZGM0MjVmMjAtZTk3YTY3MmMtZTEwZDVjMjQ=. CustomerSuppliedId : . TraceId : 01jzmykc818q18bb39g7vcv367. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : . }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-07-08T12:01:36.042034Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7524680036998391437:2473], TxId: 281474976715674, task: 2. Ctx: { SessionId : ydb://session/3?node_id=3&id=M2Y1YTUwNTctZGM0MjVmMjAtZTk3YTY3MmMtZTEwZDVjMjQ=. TraceId : 01jzmykc818q18bb39g7vcv367. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. Handle abort execution event from: [3:7524680036998391433:2446], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-07-08T12:01:36.042094Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=M2Y1YTUwNTctZGM0MjVmMjAtZTk3YTY3MmMtZTEwZDVjMjQ=, ActorId: [3:7524680032703423990:2446], ActorState: ExecuteState, TraceId: 01jzmykc818q18bb39g7vcv367, Create QueryResponse for error on request, msg: ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldDisableAutoDropping [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:127:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:132:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:137:2058] recipient: [1:111:2143] 2025-07-08T12:01:26.984572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:01:26.984594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:01:26.984598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:01:26.984601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:01:26.984613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:01:26.984616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:01:26.984628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:01:26.984640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:01:26.984706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:01:26.992935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T12:01:26.992972Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:177:2058] recipient: [1:15:2062] 2025-07-08T12:01:26.996155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:01:26.996197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:01:26.996225Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:01:26.998008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:01:26.998206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:01:26.998316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:26.998389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:01:26.998885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:26.998934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:01:26.999173Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:26.999182Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:26.999199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:01:26.999208Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:26.999213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:01:26.999247Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-07-08T12:01:27.000357Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T12:01:27.017408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:01:27.017474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:27.017517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:01:27.017576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:01:27.017586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:27.018232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:27.018254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:01:27.018301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:27.018308Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:01:27.018311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:01:27.018314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:01:27.018763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:27.018773Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:01:27.018777Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:01:27.018997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:27.019003Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:27.019006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:27.019010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:01:27.019425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:01:27.019695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:01:27.019715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:01:27.019836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:27.019853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:27.019858Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:27.019892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:01:27.019899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:27.019913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:01:27.019929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:01:27.020193Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:27.020198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:27.020218Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:27.020222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:01:27.020258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:27.020263Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:01:27.020269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:01:27.020271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:27.020274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:01:27.020276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:27.020278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:01:27.020281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TO ... 
8944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T12:01:35.821011Z node 28 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-07-08T12:01:35.821015Z node 28 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-07-08T12:01:35.821021Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-07-08T12:01:35.821225Z node 28 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T12:01:35.821240Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T12:01:35.821243Z node 28 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-07-08T12:01:35.821247Z node 28 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-07-08T12:01:35.821255Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-07-08T12:01:35.821269Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 0/1, is published: true 2025-07-08T12:01:35.822489Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1004:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1004 msg type: 269090816 2025-07-08T12:01:35.822518Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1004, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1004 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1004 at step: 5000007 2025-07-08T12:01:35.822618Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:35.822639Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1004 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 120259086445 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:35.822645Z node 28 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 1004:0, step: 5000007, at schemeshard: 72057594046678944 2025-07-08T12:01:35.822669Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 1004:0, at schemeshard: 72057594046678944 2025-07-08T12:01:35.822677Z node 28 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2025-07-08T12:01:35.822680Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-07-08T12:01:35.822686Z node 28 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2025-07-08T12:01:35.822689Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 
2025-07-08T12:01:35.822696Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T12:01:35.822704Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-07-08T12:01:35.822710Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2025-07-08T12:01:35.822716Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-07-08T12:01:35.822720Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2025-07-08T12:01:35.822723Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2025-07-08T12:01:35.822734Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-07-08T12:01:35.822739Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 2, subscribers: 0 2025-07-08T12:01:35.822743Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 12 2025-07-08T12:01:35.822746Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-07-08T12:01:35.822954Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-07-08T12:01:35.823007Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-07-08T12:01:35.823305Z node 28 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:35.823316Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:35.823341Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-07-08T12:01:35.823363Z node 28 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:35.823367Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [28:208:2210], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2025-07-08T12:01:35.823372Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [28:208:2210], at schemeshard: 72057594046678944, txId: 1004, path id: 2 FAKE_COORDINATOR: Erasing txId 1004 2025-07-08T12:01:35.823537Z node 28 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T12:01:35.823548Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T12:01:35.823552Z node 28 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2025-07-08T12:01:35.823556Z node 28 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 
1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2025-07-08T12:01:35.823560Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-07-08T12:01:35.823658Z node 28 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T12:01:35.823666Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T12:01:35.823669Z node 28 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2025-07-08T12:01:35.823673Z node 28 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-07-08T12:01:35.823676Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-07-08T12:01:35.823685Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2025-07-08T12:01:35.823732Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T12:01:35.823737Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-07-08T12:01:35.823747Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T12:01:35.824201Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-07-08T12:01:35.824509Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-07-08T12:01:35.824530Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 2025-07-08T12:01:35.824614Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:01:35.824642Z node 28 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 35us result status StatusSuccess 2025-07-08T12:01:35.824729Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 10 SubDomainVersion: 1 
SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "export-1003" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710757 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TPDiskTest::AllRequestsAreAnsweredOnPDiskRestart [GOOD] >> TPDiskTest::ChunkWriteDifferentOffsetAndSize >> KqpEffects::InsertAbort_Literal_Duplicates+UseSink [GOOD] >> KqpEffects::InsertAbort_Literal_Duplicates-UseSink >> ReadAttributesUtils::AttributesGatheringEmpry [GOOD] >> ReadAttributesUtils::AttributesGatheringFilter [GOOD] >> ReadAttributesUtils::AttributesGatheringRecursive [GOOD] >> TPDiskTest::ChunkWriteDifferentOffsetAndSize [GOOD] >> TPDiskTest::ChunkWriteBadOffset >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleView [GOOD] |69.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut |69.7%| [LD] {RESULT} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut |69.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut >> TPDiskTest::ChunkWriteBadOffset [GOOD] >> KikimrIcGateway::TestListPath >> KikimrIcGateway::TestDropExternalTable [GOOD] >> KikimrIcGateway::TestDropExternalDataSource >> KikimrProvider::TestFillAuthPropertiesNone [GOOD] >> KikimrProvider::TestFillAuthPropertiesServiceAccount [GOOD] >> KikimrProvider::TestFillAuthPropertiesMdbBasic [GOOD] >> ReadAttributesUtils::ReplaceAttributesEmpty [GOOD] >> ReadAttributesUtils::ReplaceAttributesFilter [GOOD] >> KikimrIcGateway::TestLoadExternalTable [GOOD] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata |69.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> ReadAttributesUtils::AttributesGatheringRecursive [GOOD] |69.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> ReadAttributesUtils::ReplaceAttributesFilter [GOOD] >> KikimrIcGateway::TestDropExternalDataSource [GOOD] >> KqpWrite::UpsertNullKey |69.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrProvider::TestFillAuthPropertiesMdbBasic [GOOD] >> KikimrIcGateway::TestListPath [GOOD] >> KikimrIcGateway::TestDropTable >> KqpEffects::InsertAbort_Literal_Duplicates-UseSink [GOOD] >> KqpEffects::InsertAbort_Literal_Conflict-UseSink ------- [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleView [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:127:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:132:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:137:2058] recipient: [1:111:2143] 2025-07-08T12:01:26.682063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:01:26.682086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:01:26.682091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:01:26.682095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:01:26.682108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:01:26.682112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:01:26.682129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:01:26.682141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:01:26.682215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:01:26.693453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T12:01:26.693473Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:177:2058] recipient: [1:15:2062] 2025-07-08T12:01:26.696617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:01:26.696654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:01:26.696675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:01:26.700821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:01:26.701058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:01:26.701158Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:26.701218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:01:26.701705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:26.701734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:01:26.701928Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:26.701937Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:26.701952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:01:26.701959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:26.701964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:01:26.702014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-07-08T12:01:26.703944Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T12:01:26.723177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:01:26.723236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:26.723289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:01:26.723361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:01:26.723372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:26.723944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:26.723967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:01:26.724010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:26.724019Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:01:26.724024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:01:26.724029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:01:26.724422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:26.724433Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:01:26.724438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:01:26.724740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:26.724747Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:26.724753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:26.724769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:01:26.725357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:01:26.725735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:01:26.725776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:01:26.725965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:26.726001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:26.726008Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:26.726060Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:01:26.726071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:26.726095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:01:26.726106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 
2025-07-08T12:01:26.726531Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:26.726540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:26.726571Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:26.726576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:01:26.726628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:26.726635Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:01:26.726646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:01:26.726650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:26.726655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:01:26.726658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:26.726662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:01:26.726667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TO ... 94046678944, txId: 281474976710758 2025-07-08T12:01:36.979031Z node 37 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-07-08T12:01:36.979035Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-07-08T12:01:36.979123Z node 37 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-07-08T12:01:36.979132Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-07-08T12:01:36.979138Z node 37 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2025-07-08T12:01:36.979142Z node 37 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-07-08T12:01:36.979145Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-07-08T12:01:36.979153Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-07-08T12:01:36.979502Z node 37 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-07-08T12:01:36.979677Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 72057594046678944 2025-07-08T12:01:36.979683Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-07-08T12:01:36.979688Z node 
37 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2025-07-08T12:01:36.980031Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2025-07-08T12:01:36.980059Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 2025-07-08T12:01:36.980123Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000005 2025-07-08T12:01:36.980207Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:36.980227Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 158913792108 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:36.980234Z node 37 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, step: 5000005, at schemeshard: 72057594046678944 2025-07-08T12:01:36.980259Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-07-08T12:01:36.980266Z node 37 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-07-08T12:01:36.980270Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-07-08T12:01:36.980275Z node 37 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-07-08T12:01:36.980279Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-07-08T12:01:36.980286Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T12:01:36.980294Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-07-08T12:01:36.980300Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2025-07-08T12:01:36.980306Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-07-08T12:01:36.980313Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710758:0 2025-07-08T12:01:36.980316Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710758:0 2025-07-08T12:01:36.980324Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-07-08T12:01:36.980328Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2025-07-08T12:01:36.980332Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, 
LocalPathId: 1], 10 2025-07-08T12:01:36.980335Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-07-08T12:01:36.980397Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-07-08T12:01:36.980776Z node 37 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:36.980786Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:36.980813Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-07-08T12:01:36.980833Z node 37 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:36.980838Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [37:206:2208], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2025-07-08T12:01:36.980842Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [37:206:2208], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710758 2025-07-08T12:01:36.980934Z node 37 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-07-08T12:01:36.980969Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-07-08T12:01:36.980974Z node 37 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2025-07-08T12:01:36.980978Z node 37 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-07-08T12:01:36.980982Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-07-08T12:01:36.981045Z node 37 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-07-08T12:01:36.981053Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-07-08T12:01:36.981056Z node 37 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2025-07-08T12:01:36.981060Z node 37 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-07-08T12:01:36.981066Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId 
[OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-07-08T12:01:36.981075Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2025-07-08T12:01:36.981079Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [37:125:2151] 2025-07-08T12:01:36.981114Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T12:01:36.981119Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-07-08T12:01:36.981127Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T12:01:36.981567Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-07-08T12:01:36.981848Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-07-08T12:01:36.981877Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-07-08T12:01:36.981887Z node 37 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2025-07-08T12:01:36.981898Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710758 2025-07-08T12:01:36.981905Z node 37 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-07-08T12:01:36.981908Z node 37 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758 2025-07-08T12:01:36.981912Z node 37 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758, id# 1003, itemIdx# 4294967295 2025-07-08T12:01:36.982298Z node 37 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1003 2025-07-08T12:01:36.982345Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-07-08T12:01:36.982352Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-07-08T12:01:36.982405Z node 37 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-07-08T12:01:36.982421Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-07-08T12:01:36.982425Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [37:395:2384] TestWaitNotification: OK eventTxId 1003 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestDropExternalDataSource [GOOD] Test command err: Trying to start YDB, gRPC: 14430, MsgBus: 7455 2025-07-08T12:01:36.167285Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524680036494212827:2068];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:36.167296Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/43nv/000e22/r3tmp/tmpt4aUkx/pdisk_1.dat 2025-07-08T12:01:36.249662Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14430, node 1 2025-07-08T12:01:36.277253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:36.277289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:36.280926Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:36.281265Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:36.281288Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:36.281290Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:36.281338Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7455 TClient is connected to server localhost:7455 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:36.356568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:36.375885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-07-08T12:01:36.378436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T12:01:36.382726Z node 1 :TX_PROXY ERROR: Actor# [1:7524680036494213451:2334] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/f1/f2/external_table\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:132" severity: 1 } 2025-07-08T12:01:36.382772Z node 1 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976715660, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/f1/f2/external_table', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:132
: Error: Scheme operation failed, status: ExecComplete, reason: Check failed: path: '/Root/f1/f2/external_table', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeExternalTable, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:132 Trying to start YDB, gRPC: 5187, MsgBus: 13763 2025-07-08T12:01:36.720054Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524680037679158486:2136];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:36.720241Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000e22/r3tmp/tmpgLOSkf/pdisk_1.dat 2025-07-08T12:01:36.734901Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5187, node 2 2025-07-08T12:01:36.745214Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:36.745228Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:36.745230Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:36.745278Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13763 TClient is connected to server localhost:13763 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-07-08T12:01:36.821413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:36.822495Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:36.823036Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:36.823056Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:36.824183Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:36.835053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-07-08T12:01:36.837924Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 32756, MsgBus: 25812 2025-07-08T12:01:37.254026Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7524680040832953515:2061];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:37.254065Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000e22/r3tmp/tmpl1PrIu/pdisk_1.dat 2025-07-08T12:01:37.269657Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32756, node 3 2025-07-08T12:01:37.284260Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:37.284271Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:37.284272Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:37.284311Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25812 TClient is connected to server localhost:25812 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-07-08T12:01:37.358644Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:37.358678Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:37.359096Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:37.359665Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:37.361568Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:37.366835Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715658:2, at schemeshard: 72057594046644480 >> KqpScan::RightJoinSimple >> KqpScan::Grep >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleView [GOOD] >> KqpSplit::AfterResultMultiRange+Unspecified >> KikimrIcGateway::TestCreateExternalTable >> KikimrIcGateway::TestDropTable [GOOD] >> KikimrIcGateway::TestDropResourcePool >> KqpWrite::UpsertNullKey [GOOD] >> KqpWrite::ProjectReplace-UseSink >> KqpEffects::InsertAbort_Literal_Conflict-UseSink [GOOD] >> KqpScan::GrepByString >> KikimrIcGateway::TestCreateExternalTable [GOOD] >> KikimrIcGateway::TestCreateResourcePool >> KqpScan::Grep [GOOD] >> KqpScan::FullFrameWindow >> DataStreams::TestUpdateStorage >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed >> TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleView [GOOD] >> KikimrIcGateway::TestDropResourcePool [GOOD] >> DataStreams::TestStreamStorageRetention ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleView [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:127:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:132:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:137:2058] recipient: [1:111:2143] 2025-07-08T12:01:26.964472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:01:26.964491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:01:26.964496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching 
config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:01:26.964500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:01:26.964512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:01:26.964515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:01:26.964534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:01:26.964545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:01:26.964616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:01:26.976291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T12:01:26.976311Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:177:2058] recipient: [1:15:2062] 2025-07-08T12:01:26.981406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:01:26.981451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:01:26.981475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:01:26.983386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:01:26.983539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:01:26.983656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:26.983711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:01:26.984286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:26.984324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:01:26.984541Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:26.984553Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:26.984569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:01:26.984578Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:26.984583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:01:26.984637Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-07-08T12:01:26.986525Z node 1 :HIVE INFO: 
[72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T12:01:27.006143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:01:27.006215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:27.006264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:01:27.006311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:01:27.006319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:27.007242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:27.007266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:01:27.007313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:27.007320Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:01:27.007323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:01:27.007327Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:01:27.008460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:27.008471Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:01:27.008475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:01:27.009193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:27.009208Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:27.009215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:27.009222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:01:27.009798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:01:27.010452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg 
operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:01:27.010482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:01:27.010616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:27.010635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:27.010640Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:27.010679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:01:27.010686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:27.010705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:01:27.010713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:01:27.011361Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:27.011372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:27.011413Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:27.011419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:01:27.011487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:27.011494Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:01:27.011506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:01:27.011511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:27.011514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:01:27.011517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:27.011521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:01:27.011526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TO ... 
94046678944, txId: 281474976710758 2025-07-08T12:01:38.036435Z node 40 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-07-08T12:01:38.036440Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-07-08T12:01:38.036597Z node 40 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-07-08T12:01:38.036609Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-07-08T12:01:38.036616Z node 40 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2025-07-08T12:01:38.036620Z node 40 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-07-08T12:01:38.036625Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-07-08T12:01:38.036636Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-07-08T12:01:38.037182Z node 40 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-07-08T12:01:38.037209Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 72057594046678944 2025-07-08T12:01:38.037215Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-07-08T12:01:38.037219Z node 40 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2025-07-08T12:01:38.037535Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2025-07-08T12:01:38.037560Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000005 2025-07-08T12:01:38.037835Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-07-08T12:01:38.037870Z node 40 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:38.037888Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 171798693997 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:38.037896Z node 40 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, 
step: 5000005, at schemeshard: 72057594046678944 2025-07-08T12:01:38.037917Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-07-08T12:01:38.037926Z node 40 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-07-08T12:01:38.037930Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-07-08T12:01:38.037935Z node 40 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-07-08T12:01:38.037938Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-07-08T12:01:38.037945Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T12:01:38.037954Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-07-08T12:01:38.037959Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2025-07-08T12:01:38.037966Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-07-08T12:01:38.037973Z node 40 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710758:0 2025-07-08T12:01:38.037977Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710758:0 2025-07-08T12:01:38.037986Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-07-08T12:01:38.037991Z node 40 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2025-07-08T12:01:38.037995Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2025-07-08T12:01:38.037999Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-07-08T12:01:38.038139Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-07-08T12:01:38.038495Z node 40 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:38.038504Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:38.038529Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-07-08T12:01:38.038549Z node 40 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:38.038554Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [40:207:2209], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2025-07-08T12:01:38.038559Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [40:207:2209], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710758 2025-07-08T12:01:38.038668Z node 40 :FLAT_TX_SCHEMESHARD INFO: 
Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-07-08T12:01:38.038679Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-07-08T12:01:38.038683Z node 40 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2025-07-08T12:01:38.038688Z node 40 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-07-08T12:01:38.038692Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-07-08T12:01:38.038763Z node 40 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-07-08T12:01:38.038771Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-07-08T12:01:38.038775Z node 40 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2025-07-08T12:01:38.038778Z node 40 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-07-08T12:01:38.038784Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-07-08T12:01:38.038794Z node 40 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2025-07-08T12:01:38.038798Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [40:126:2152] 2025-07-08T12:01:38.038878Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T12:01:38.038884Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-07-08T12:01:38.038892Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T12:01:38.039472Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-07-08T12:01:38.039562Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-07-08T12:01:38.039579Z node 40 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-07-08T12:01:38.039589Z node 40 :FLAT_TX_SCHEMESHARD INFO: Handle: 
TEvNotifyTxCompletionResult: txId# 281474976710758 2025-07-08T12:01:38.039598Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710758 2025-07-08T12:01:38.039604Z node 40 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-07-08T12:01:38.039608Z node 40 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758 2025-07-08T12:01:38.039613Z node 40 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758, id# 1003, itemIdx# 4294967295 2025-07-08T12:01:38.040019Z node 40 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1003 2025-07-08T12:01:38.040067Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-07-08T12:01:38.040074Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-07-08T12:01:38.040124Z node 40 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-07-08T12:01:38.040139Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-07-08T12:01:38.040156Z node 40 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [40:399:2388] TestWaitNotification: OK eventTxId 1003 >> KqpScan::RightJoinSimple [GOOD] >> KqpScan::RightOnlyJoinSimple >> DataStreams::TestGetShardIterator >> KqpSplit::AfterResultMultiRange+Unspecified [GOOD] >> KqpSplit::AfterResultMultiRangeSegmentPartition+Ascending >> KikimrIcGateway::TestCreateResourcePool [GOOD] >> KikimrIcGateway::TestALterResourcePool ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Literal_Conflict-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 2857, MsgBus: 29894 2025-07-08T12:01:35.954262Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524680033782040380:2066];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:35.954283Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b58/r3tmp/tmpj8TsU8/pdisk_1.dat 2025-07-08T12:01:36.022993Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2857, node 1 2025-07-08T12:01:36.041384Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:36.041403Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:36.041406Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:36.041458Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:01:36.056225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:36.056256Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:36.057346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29894 TClient is connected to server localhost:29894 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:36.102184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:36.119575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:36.137050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:36.163832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:36.180597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:36.331526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:36.340350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:36.351188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:36.406407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:36.462434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:36.475158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:36.490299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:36.648533Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=3; 2025-07-08T12:01:36.650045Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-07-08T12:01:36.650091Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2025-07-08T12:01:36.650154Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7524680038077010179:2455], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [1:7524680038077010155:2455]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[1:7524680038077010179:2455].{
: Error: Conflict with existing key., code: 2012 } 2025-07-08T12:01:36.650272Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7524680038077010171:2455], SessionActorId: [1:7524680038077010155:2455], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:7524680038077010155:2455]. isRollback=0 2025-07-08T12:01:36.650462Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTVmODIwOWItZDM4MDM4MzgtODFiZjM3MWItYzZhNjBmODE=, ActorId: [1:7524680038077010155:2455], ActorState: ExecuteState, TraceId: 01jzmykcvva1t5mr8r8ssese2e, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7524680038077010172:2455] from: [1:7524680038077010171:2455] 2025-07-08T12:01:36.650562Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7524680038077010172:2455] TxId: 281474976715670. Ctx: { TraceId: 01jzmykcvva1t5mr8r8ssese2e, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTVmODIwOWItZDM4MDM4MzgtODFiZjM3MWItYzZhNjBmODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } 2025-07-08T12:01:36.650602Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTVmODIwOWItZDM4MDM4MzgtODFiZjM3MWItYzZhNjBmODE=, ActorId: [1:7524680038077010155:2455], ActorState: ExecuteState, TraceId: 01jzmykcvva1t5mr8r8ssese2e, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 24087, MsgBus: 23524 2025-07-08T12:01:37.039561Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524680044039449075:2065];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:37.040349Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b58/r3tmp/tmpdkwp5n/pdisk_1.dat 2025-07-08T12:01:37.058898Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24087, node 2 2025-07-08T12:01:37.085705Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:37.085718Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:37.085720Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:37.085758Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23524 TClient is connected to server localhost:23524 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T12:01:37.147076Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:37.147104Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:37.147356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:37.148608Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:37.148832Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:37.152080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:37.166910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-07-08T12:01:37.187451Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:37.200639Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:37.431467Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:37.440095Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:37.454438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:37.468342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:37.482119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:37.500944Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:37.511221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:37.697979Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7524680044039451553:2456], TxId: 281474976715671, task: 1. Ctx: { TraceId : 01jzmykdvw93ra9n6w7fc3cmrw. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZjIyZWViYWMtMTMyMjI1ZTItMzkyMDRiZjctYmU0N2RmYw==. CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-07-08T12:01:37.698064Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7524680044039451554:2457], TxId: 281474976715671, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjIyZWViYWMtMTMyMjI1ZTItMzkyMDRiZjctYmU0N2RmYw==. TraceId : 01jzmykdvw93ra9n6w7fc3cmrw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. Handle abort execution event from: [2:7524680044039451550:2446], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-07-08T12:01:37.698140Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjIyZWViYWMtMTMyMjI1ZTItMzkyMDRiZjctYmU0N2RmYw==, ActorId: [2:7524680044039451507:2446], ActorState: ExecuteState, TraceId: 01jzmykdvw93ra9n6w7fc3cmrw, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 6744, MsgBus: 19974 2025-07-08T12:01:37.994980Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7524680041820147417:2240];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:37.995040Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b58/r3tmp/tmpT4xLTP/pdisk_1.dat 2025-07-08T12:01:38.009574Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6744, node 3 2025-07-08T12:01:38.021350Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:38.021361Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:38.021365Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:38.021408Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19974 TClient is connected to server localhost:19974 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:38.098134Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:38.098164Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:38.098521Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.099102Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:38.101431Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:38.109655Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:38.170049Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:38.189513Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:38.202150Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:38.342074Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.398629Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.407303Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.423302Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.435423Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.454072Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.464792Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.661966Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7524680046115116998:2456], TxId: 281474976715671, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=ZGIzYmMzMjItYzhjN2ZiZTktNTA5MmIyZGMtOTJkY2JjN2U=. CustomerSuppliedId : . TraceId : 01jzmykess8fby3rmcaerfdpk9. CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-07-08T12:01:38.662045Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7524680046115116999:2457], TxId: 281474976715671, task: 2. Ctx: { SessionId : ydb://session/3?node_id=3&id=ZGIzYmMzMjItYzhjN2ZiZTktNTA5MmIyZGMtOTJkY2JjN2U=. TraceId : 01jzmykess8fby3rmcaerfdpk9. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. Handle abort execution event from: [3:7524680046115116995:2446], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-07-08T12:01:38.662106Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZGIzYmMzMjItYzhjN2ZiZTktNTA5MmIyZGMtOTJkY2JjN2U=, ActorId: [3:7524680046115116962:2446], ActorState: ExecuteState, TraceId: 01jzmykess8fby3rmcaerfdpk9, Create QueryResponse for error on request, msg: >> KqpWrite::ProjectReplace-UseSink [GOOD] >> TBsVDiskGC::TGCManyVPutsCompactGCAllTest [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestDropResourcePool [GOOD] Test command err: Trying to start YDB, gRPC: 30236, MsgBus: 16485 2025-07-08T12:01:37.220887Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524680040994947224:2059];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:37.221047Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000df6/r3tmp/tmpMWOlGX/pdisk_1.dat 2025-07-08T12:01:37.290528Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30236, node 1 2025-07-08T12:01:37.304290Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:37.304306Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:37.304308Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:37.304359Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16485 TClient is connected to server localhost:16485 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-07-08T12:01:37.359315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:37.359337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:37.361860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:37.367483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:37.370890Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:37.385330Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-07-08T12:01:37.583823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T12:01:37.643316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:37.651258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:37.663369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5016, MsgBus: 29047 2025-07-08T12:01:37.965488Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524680042270500534:2064];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:37.965509Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000df6/r3tmp/tmpPOIPfz/pdisk_1.dat 2025-07-08T12:01:37.979646Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5016, node 2 2025-07-08T12:01:37.990390Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:37.990403Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:37.990405Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:37.990438Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29047 TClient is connected to server localhost:29047 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-07-08T12:01:38.070659Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:38.070688Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:38.070986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.072251Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:38.073842Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:38.307081Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.322731Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.346312Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.357786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
: Info: Success, code: 4 2025-07-08T12:01:38.414007Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found Trying to start YDB, gRPC: 13209, MsgBus: 22187 2025-07-08T12:01:38.691372Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7524680045093750052:2062];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:38.691398Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000df6/r3tmp/tmpZwWVGL/pdisk_1.dat 2025-07-08T12:01:38.706846Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13209, node 3 2025-07-08T12:01:38.722506Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:38.722523Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:38.722525Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:38.722578Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22187 TClient is connected to server localhost:22187 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:38.795884Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:38.795918Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:38.796316Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.796926Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-07-08T12:01:38.801147Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:38.854283Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata [GOOD] >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata >> KikimrIcGateway::TestALterResourcePool [GOOD] >> TExportToS3WithRebootsTests::ShouldSucceedAutoDropping [GOOD] >> KqpScan::GrepByString [GOOD] >> KqpScan::GrepLimit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_vdisk/unittest >> TBsVDiskGC::TGCManyVPutsCompactGCAllTest [GOOD] Test command err: 2025-07-08T11:54:24.733424Z :BS_HULLRECS CRIT: VDISK[0:_:0:0:0]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:322:0:0:10:1] barrier# {Soft# {Gen# 1 Step# 450} Hard# } 2025-07-08T11:54:24.748623Z :BS_HULLRECS CRIT: VDISK[0:_:0:0:0]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:370:0:0:3:1] barrier# {Soft# {Gen# 1 Step# 450} Hard# } 2025-07-08T11:54:24.754069Z :BS_HULLRECS CRIT: VDISK[0:_:0:0:0]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:424:0:0:3:1] barrier# {Soft# {Gen# 1 Step# 450} Hard# } 2025-07-08T11:54:25.696900Z :BS_HULLRECS CRIT: VDISK[0:_:0:2:0]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:322:0:0:10:3] barrier# {Soft# {Gen# 1 Step# 450} Hard# } 2025-07-08T11:54:25.696903Z :BS_HULLRECS CRIT: VDISK[0:_:0:0:0]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:322:0:0:10:1] barrier# {Soft# {Gen# 1 Step# 450} Hard# } 2025-07-08T11:54:25.696904Z :BS_HULLRECS CRIT: VDISK[0:_:0:1:1]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:322:0:0:10:2] barrier# {Soft# {Gen# 1 Step# 450} Hard# } 2025-07-08T11:54:25.696925Z :BS_HULLRECS CRIT: VDISK[0:_:0:1:1]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:370:0:0:3:2] barrier# {Soft# {Gen# 1 Step# 450} Hard# } 2025-07-08T11:54:25.696926Z :BS_HULLRECS CRIT: VDISK[0:_:0:0:0]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:370:0:0:3:1] barrier# {Soft# {Gen# 1 Step# 450} Hard# } 2025-07-08T11:54:25.696936Z :BS_HULLRECS CRIT: VDISK[0:_:0:2:1]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:370:0:0:3:3] barrier# {Soft# {Gen# 1 Step# 450} Hard# } 2025-07-08T11:54:25.696937Z :BS_HULLRECS CRIT: VDISK[0:_:0:1:1]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:424:0:0:3:2] barrier# {Soft# {Gen# 1 Step# 450} Hard# } 2025-07-08T11:54:25.696959Z :BS_HULLRECS CRIT: VDISK[0:_:0:2:1]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:424:0:0:3:3] barrier# {Soft# {Gen# 1 Step# 450} Hard# } 2025-07-08T11:54:25.696979Z :BS_HULLRECS CRIT: VDISK[0:_:0:0:0]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:424:0:0:3:1] barrier# {Soft# {Gen# 1 Step# 450} Hard# } 2025-07-08T11:54:27.381755Z :BS_HULLRECS CRIT: VDISK[0:_:0:0:0]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:322:0:0:10:1] barrier# {Soft# {Gen# 1 Step# 1000} Hard# } 2025-07-08T11:54:27.381755Z :BS_HULLRECS CRIT: VDISK[0:_:0:2:0]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:322:0:0:10:3] barrier# {Soft# {Gen# 1 Step# 1000} Hard# } 2025-07-08T11:54:27.381760Z :BS_HULLRECS CRIT: VDISK[0:_:0:1:1]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:322:0:0:10:2] 
barrier# {Soft# {Gen# 1 Step# 1000} Hard# } 2025-07-08T11:54:27.381784Z :BS_HULLRECS CRIT: VDISK[0:_:0:0:0]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:370:0:0:3:1] barrier# {Soft# {Gen# 1 Step# 1000} Hard# } 2025-07-08T11:54:27.381784Z :BS_HULLRECS CRIT: VDISK[0:_:0:2:0]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:472:0:0:4:3] barrier# {Soft# {Gen# 1 Step# 1000} Hard# } 2025-07-08T11:54:27.381788Z :BS_HULLRECS CRIT: VDISK[0:_:0:1:1]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:370:0:0:3:2] barrier# {Soft# {Gen# 1 Step# 1000} Hard# } 2025-07-08T11:54:27.381793Z :BS_HULLRECS CRIT: VDISK[0:_:0:0:0]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:424:0:0:3:1] barrier# {Soft# {Gen# 1 Step# 1000} Hard# } 2025-07-08T11:54:27.381799Z :BS_HULLRECS CRIT: VDISK[0:_:0:1:1]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:424:0:0:3:2] barrier# {Soft# {Gen# 1 Step# 1000} Hard# } 2025-07-08T11:54:27.381802Z :BS_HULLRECS CRIT: VDISK[0:_:0:0:0]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:472:0:0:4:1] barrier# {Soft# {Gen# 1 Step# 1000} Hard# } 2025-07-08T11:54:27.381808Z :BS_HULLRECS CRIT: VDISK[0:_:0:2:1]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:370:0:0:3:3] barrier# {Soft# {Gen# 1 Step# 1000} Hard# } 2025-07-08T11:54:27.381809Z :BS_HULLRECS CRIT: VDISK[0:_:0:1:1]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:472:0:0:4:2] barrier# {Soft# {Gen# 1 Step# 1000} Hard# } 2025-07-08T11:54:27.381810Z :BS_HULLRECS CRIT: VDISK[0:_:0:0:0]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:915:0:0:5:1] barrier# {Soft# {Gen# 1 Step# 1000} Hard# } 2025-07-08T11:54:27.381821Z :BS_HULLRECS CRIT: VDISK[0:_:0:1:1]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:915:0:0:5:2] barrier# {Soft# {Gen# 1 Step# 1000} Hard# } 2025-07-08T11:54:27.381831Z :BS_HULLRECS CRIT: VDISK[0:_:0:2:1]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:424:0:0:3:3] barrier# {Soft# {Gen# 1 Step# 1000} Hard# } 2025-07-08T11:54:27.381841Z :BS_HULLRECS CRIT: VDISK[0:_:0:2:1]: Db# LogoBlobs; putting blob beyond the barrier id# [5000:1:915:0:0:5:3] barrier# {Soft# {Gen# 1 Step# 1000} Hard# } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/effects/unittest >> KqpWrite::ProjectReplace-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 11093, MsgBus: 20589 2025-07-08T12:01:37.803462Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524680040482696288:2071];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:37.803708Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b52/r3tmp/tmpWeaOrJ/pdisk_1.dat 2025-07-08T12:01:37.873204Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11093, node 1 2025-07-08T12:01:37.897847Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:37.897860Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:37.897861Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:37.897905Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:01:37.904337Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:37.904366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:37.905435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20589 TClient is connected to server localhost:20589 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T12:01:37.973874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:37.976570Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-07-08T12:01:37.989718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:38.018760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:38.081109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:38.100877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:38.193298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.200743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.210654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.225312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.238961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.251897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.266752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 26535, MsgBus: 30181 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b52/r3tmp/tmpL5PLmn/pdisk_1.dat 2025-07-08T12:01:38.756435Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:01:38.764479Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26535, node 2 2025-07-08T12:01:38.781141Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:38.781158Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:38.781160Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:38.781205Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30181 TClient is connected to server localhost:30181 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-07-08T12:01:38.855468Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:38.855503Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:38.855926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:38.858083Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:38.858345Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:38.880339Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:38.945327Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:38.981102Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:39.006324Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:39.101454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.112034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.120524Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.148311Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.175158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.186418Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.197450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 >> DataStreams::TestStreamStorageRetention [GOOD] >> DataStreams::TestStreamPagination >> KqpScan::RightOnlyJoinSimple [GOOD] >> DataStreams::TestUpdateStorage [GOOD] >> DataStreams::TestStreamTimeRetention ------- [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleView [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:127:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:132:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:137:2058] recipient: [1:111:2143] 2025-07-08T12:01:26.957420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:01:26.957441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:01:26.957446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:01:26.957450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:01:26.957463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:01:26.957467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:01:26.957490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:01:26.957501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:01:26.957587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:01:26.969831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T12:01:26.969853Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:177:2058] recipient: [1:15:2062] 2025-07-08T12:01:26.974841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:01:26.974887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:01:26.974910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:01:26.976641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:01:26.976798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:01:26.976907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, 
SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:26.976979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:01:26.977443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:26.977476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:01:26.977677Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:26.977689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:26.977704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:01:26.977714Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:26.977719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:01:26.977752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-07-08T12:01:26.978721Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T12:01:26.996096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:01:26.996165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:26.996213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:01:26.996273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:01:26.996284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:26.996930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:26.996968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:01:26.997014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:26.997025Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at 
tablet# 72057594046678944 2025-07-08T12:01:26.997030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:01:26.997035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:01:26.997458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:26.997470Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:01:26.997475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:01:26.997830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:26.997840Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:26.997845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:26.997851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:01:26.998407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:01:26.998796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:01:26.998828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:01:26.998999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:26.999023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:26.999029Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:26.999086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:01:26.999093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:26.999113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:01:26.999124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:01:26.999529Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:26.999538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:26.999567Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:26.999572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:01:26.999622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:26.999629Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:01:26.999637Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:01:26.999641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:26.999645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:01:26.999648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:26.999651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:01:26.999656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TO ... 94046678944, txId: 281474976710758 2025-07-08T12:01:39.010530Z node 41 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-07-08T12:01:39.010534Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-07-08T12:01:39.010634Z node 41 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-07-08T12:01:39.010644Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-07-08T12:01:39.010648Z node 41 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2025-07-08T12:01:39.010651Z node 41 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-07-08T12:01:39.010655Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-07-08T12:01:39.010663Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-07-08T12:01:39.011141Z node 41 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-07-08T12:01:39.011171Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 72057594046678944 2025-07-08T12:01:39.011175Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-07-08T12:01:39.011180Z node 41 :FLAT_TX_SCHEMESHARD INFO: 
NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2025-07-08T12:01:39.011307Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2025-07-08T12:01:39.011332Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 2025-07-08T12:01:39.011384Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000005 2025-07-08T12:01:39.011543Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:39.011563Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 176093661293 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:39.011569Z node 41 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, step: 5000005, at schemeshard: 72057594046678944 2025-07-08T12:01:39.011591Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-07-08T12:01:39.011599Z node 41 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-07-08T12:01:39.011602Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-07-08T12:01:39.011607Z node 41 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-07-08T12:01:39.011610Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-07-08T12:01:39.011617Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T12:01:39.011625Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-07-08T12:01:39.011630Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2025-07-08T12:01:39.011636Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-07-08T12:01:39.011639Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710758:0 2025-07-08T12:01:39.011643Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710758:0 2025-07-08T12:01:39.011651Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-07-08T12:01:39.011655Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2025-07-08T12:01:39.011659Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 10 
2025-07-08T12:01:39.011662Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-07-08T12:01:39.011820Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 FAKE_COORDINATOR: Erasing txId 281474976710758 2025-07-08T12:01:39.012142Z node 41 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:39.012150Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:39.012177Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-07-08T12:01:39.012198Z node 41 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:39.012203Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [41:206:2208], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2025-07-08T12:01:39.012208Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [41:206:2208], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 4 2025-07-08T12:01:39.012291Z node 41 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-07-08T12:01:39.012300Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-07-08T12:01:39.012304Z node 41 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2025-07-08T12:01:39.012308Z node 41 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-07-08T12:01:39.012312Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-07-08T12:01:39.012383Z node 41 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-07-08T12:01:39.012391Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-07-08T12:01:39.012394Z node 41 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2025-07-08T12:01:39.012398Z node 41 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-07-08T12:01:39.012401Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 4] was 1 2025-07-08T12:01:39.012409Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2025-07-08T12:01:39.012414Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [41:125:2151] 2025-07-08T12:01:39.012475Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T12:01:39.012480Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-07-08T12:01:39.012488Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T12:01:39.012787Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-07-08T12:01:39.013043Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-07-08T12:01:39.013067Z node 41 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-07-08T12:01:39.013076Z node 41 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2025-07-08T12:01:39.013086Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710758 2025-07-08T12:01:39.013092Z node 41 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-07-08T12:01:39.013095Z node 41 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758 2025-07-08T12:01:39.013099Z node 41 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758, id# 1003, itemIdx# 4294967295 2025-07-08T12:01:39.013370Z node 41 :EXPORT DEBUG: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 1003 2025-07-08T12:01:39.013419Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-07-08T12:01:39.013426Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-07-08T12:01:39.013476Z node 41 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-07-08T12:01:39.013490Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-07-08T12:01:39.013495Z node 41 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [41:403:2392] TestWaitNotification: OK eventTxId 1003 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/pdisk/ut/unittest >> TPDiskTest::ChunkWriteBadOffset [GOOD] Test command err: 2025-07-08T11:54:30.912586Z node 1 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 2025-07-08T11:54:30.913020Z node 1 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 14706415469632409179 MagicNextLogChunkReference: 13331014999582683454 MagicLogChunk: 15644141536406754926 MagicDataChunk: 12032274468301322393 MagicSysLogChunk: 10278435828226480988 MagicFormatChunk: 
17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751975670886137 (2025-07-08T11:54:30.886137Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T11:54:30.914639Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T11:54:30.915489Z node 1 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T11:54:30.915711Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T11:54:30.916024Z node 1 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T11:54:30.917089Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [0:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 2 PDiskId# 1 2025-07-08T11:54:30.917434Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 4 vDiskId# [1:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 3 PDiskId# 1 2025-07-08T11:54:30.917782Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 5 vDiskId# [2:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 4 PDiskId# 1 2025-07-08T11:54:30.918154Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 6 vDiskId# [3:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 5 PDiskId# 1 2025-07-08T11:54:30.918402Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 7 vDiskId# [4:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 6 PDiskId# 1 2025-07-08T11:54:30.918625Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 8 vDiskId# [5:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 7 PDiskId# 1 2025-07-08T11:54:30.918866Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 9 vDiskId# [6:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 8 PDiskId# 1 2025-07-08T11:54:30.919175Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 10 vDiskId# [7:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 9 PDiskId# 1 2025-07-08T11:54:30.919440Z node 1 :BS_PDISK NOTICE: 
{BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 11 vDiskId# [8:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 10 PDiskId# 1 2025-07-08T11:54:30.920900Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 12 vDiskId# [9:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 11 PDiskId# 1 2025-07-08T11:54:30.921353Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 13 vDiskId# [a:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 12 PDiskId# 1 2025-07-08T11:54:30.921575Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 14 vDiskId# [b:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 13 PDiskId# 1 2025-07-08T11:54:30.921813Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 15 vDiskId# [c:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 14 PDiskId# 1 2025-07-08T11:54:30.922023Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 16 vDiskId# [d:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 15 PDiskId# 1 2025-07-08T11:54:30.922230Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 17 vDiskId# [e:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 16 PDiskId# 1 2025-07-08T11:54:30.922414Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 18 vDiskId# [f:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 17 PDiskId# 1 2025-07-08T11:54:30.922583Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 19 vDiskId# [10:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 18 PDiskId# 1 2025-07-08T11:54:30.922804Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 20 vDiskId# [11:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 19 PDiskId# 1 2025-07-08T11:54:30.923006Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 21 vDiskId# [12:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 20 PDiskId# 1 2025-07-08T11:54:30.923176Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 22 vDiskId# [13:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 21 PDiskId# 1 2025-07-08T11:54:30.924830Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 23 vDiskId# [14:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 22 PDiskId# 1 2025-07-08T11:54:30.925338Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 24 vDiskId# [15:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 23 PDiskId# 1 2025-07-08T11:54:30.925622Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 25 vDiskId# [16:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 24 PDiskId# 1 2025-07-08T11:54:30.925876Z 
node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 26 vDiskId# [17:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 25 PDiskId# 1 2025-07-08T11:54:30.926089Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 27 vDiskId# [18:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 26 PDiskId# 1 2025-07-08T11:54:30.926321Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 28 vDiskId# [19:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 27 PDiskId# 1 2025-07-08T11:54:30.926593Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 29 vDiskId# [1a:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 28 PDiskId# 1 2025-07-08T11:54:30.926847Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 30 vDiskId# [1b:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 29 PDiskId# 1 2025-07-08T11:54:30.927153Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 31 vDiskId# [1c:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 30 PDiskId# 1 2025-07-08T11:54:30.927410Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 32 vDiskId# [1d:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 31 PDiskId# 1 2025-07-08T11:54:30.927649Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 33 vDiskId# [1e:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 32 PDiskId# 1 2025-07-08T11:54:30.927892Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 34 vDiskId# [1f:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 33 PDiskId# 1 2025-07-08T11:54:30.928137Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 35 vDiskId# [20:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 34 PDiskId# 1 2025-07-08T11:54:30.928580Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 36 vDiskId# [21:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 35 PDiskId# 1 2025-07-08T11:54:30.928812Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 37 vDiskId# [22:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 36 PDiskId# 1 2025-07-08T11:54:30.929170Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 38 vDiskId# [23:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 37 PDiskId# 1 2025-07-08T11:54:30.929437Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 39 vDiskId# [24:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 38 PDiskId# 1 2025-07-08T11:54:30.929743Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 40 vDiskId# [25:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 39 PDiskId# 
1 2025-07-08T11:54:30.930043Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 41 vDiskId# [26:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 40 PDiskId# 1 2025-07-08T11:54:30.930344Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 42 vDiskId# [27:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 41 PDiskId# 1 2025-07-08T11:54:30.930659Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 43 vDiskId# [28:_:0:0:0] FirstNonceToKeep# 1643702 CutLogId# [1:7524678208014103650:2050] ownerRound# 42 PDiskId# 1 2025-07-08T11:54:30.930981Z node 1 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created owne ... P01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 7049306535461248393 MagicNextLogChunkReference: 7444385882395829116 MagicLogChunk: 9020623312779092305 MagicDataChunk: 7329894902946573864 MagicSysLogChunk: 8631123748682933632 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751976096906407 (2025-07-08T12:01:36.906407Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T12:01:36.925417Z node 1569 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T12:01:36.926209Z node 1569 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T12:01:36.926232Z node 1569 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T12:01:36.926749Z node 1569 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T12:01:36.926814Z node 1569 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [1eeec:_:0:0:0] FirstNonceToKeep# 1193037 CutLogId# [1569:7524680039985581623:2050] ownerRound# 128269 PDiskId# 1 2025-07-08T12:01:36.927151Z node 1569 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 seed# 1751976096927766 2025-07-08T12:01:37.093610Z node 1570 :BS_PDISK NOTICE: {BPD38@blobstorage_pdisk_impl.cpp:2671} OnDriveStartup Path# "" PDiskId# 1 
2025-07-08T12:01:37.093836Z node 1570 :BS_PDISK NOTICE: {BSP01@blobstorage_pdisk_actor.cpp:565} Successfully read format record Format# {TDiskFormat Version: 3 DiskSize: 134217728000 bytes (134 GB) Guid: 8875617534063245611 MagicNextLogChunkReference: 1293109415413162245 MagicLogChunk: 10199259222487812426 MagicDataChunk: 1586706027952377205 MagicSysLogChunk: 4651484292169551278 MagicFormatChunk: 17332287817462050952 ChunkSize: 136314880 bytes (136 MB) SectorSize: 4096 SysLogSectorCount: 64 SystemChunkCount: 1 FormatText: "Info" DiskFormatSize: 1168 (current sizeof: 1168) TimestampUs: 1751976097074877 (2025-07-08T12:01:37.074877Z) FormatFlags: {ErasureEncodeSysLog | ErasureEncodeFormat | ErasureEncodeNextChunkReference | EncryptFormat | EncryptData}} PDiskId# 1 2025-07-08T12:01:37.094994Z node 1570 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:252} SysLogRecord is read Record# {TSysLogRecord Version# 0 NonceSet# {TNonceSet Version# 0 NonceSysLog# 61 NonceLog# 1 NonceData# 1} LogHeadChunkIdx# 1 LogHeadChunkPreviousNonce# 0} PDiskId# 1 2025-07-08T12:01:37.095643Z node 1570 :BS_PDISK NOTICE: {LR018@blobstorage_pdisk_logreader.cpp:810} PDiskId# 1 LogReader IsInitial# 1 ChunkIdx# 1 SectorIdx# 0 OffsetInSector# 0 In ProcessSectorSet got !restorator.GoodSectorFlags LastGoodToWriteLogPosition# { ChunkIdx# 1 OffsetInChunk# 0} PDiskId# 1 2025-07-08T12:01:37.095667Z node 1570 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 0 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 1 OffsetInChunk# 0} isEndOfLog# true StatusFlags# IsValid | DiskSpaceCyan | DiskSpaceLightYellowMove | DiskSpaceYellowStop | DiskSpaceLightOrange | DiskSpacePreOrange | DiskSpaceOrange | DiskSpaceRed | DiskSpaceBlack Results.size# 0} PDiskId# 1 2025-07-08T12:01:37.095898Z node 1570 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1681} PDisk have successfully started PDiskId# 1 2025-07-08T12:01:37.096030Z node 1570 :BS_PDISK NOTICE: {BPD02@blobstorage_pdisk_impl.cpp:1941} New owner is created ownerId# 3 vDiskId# [1eeed:_:0:0:0] FirstNonceToKeep# 1561290 CutLogId# [1570:7524680040080055355:2050] ownerRound# 128270 PDiskId# 1 2025-07-08T12:01:37.096282Z node 1570 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_logreader.cpp:1176} Reply to owner OwnerId# 3 Result# {EvReadLogResult Status# OK ErrorReason# "" position# { ChunkIdx# 0 OffsetInChunk# 0} nextPosition# { ChunkIdx# 4294967295 OffsetInChunk# 4294967295} isEndOfLog# true StatusFlags# IsValid Results.size# 0} PDiskId# 1 seed# 1751976097096591 2025-07-08T12:01:37.096635Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 1 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.096677Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 197 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.096711Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 322 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.096740Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 485 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.096771Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 580 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.096800Z node 
1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 659 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.096831Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 724 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.096867Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 890 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.096892Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 1064 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.096915Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 1097 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.096936Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 1098 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.096982Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 1217 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.097060Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 1351 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.097119Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 1361 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.097160Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 1481 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.097225Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 1494 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.097285Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 1679 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.097346Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 1816 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.097398Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 1903 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.097451Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 2022 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.097516Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 2089 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.097589Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 2202 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.097658Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 2228 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.097724Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 2400 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.097820Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned 
offset# 2501 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.097877Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 2573 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.097941Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 2671 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.097983Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 2851 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.098012Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 3024 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.098057Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 3164 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.098084Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 3180 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.098112Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 3367 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.098139Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 3552 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.098166Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 3580 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.098192Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 3769 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.098256Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 3813 ownerId# 3 PDiskId# 1 2025-07-08T12:01:37.098362Z node 1570 :BS_PDISK ERROR: {PBD23@blobstorage_pdisk_impl.cpp:903} PDiskId# 1 Can't write chunkIdx# 2 with not aligned offset# 3965 ownerId# 3 PDiskId# 1 |69.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |69.8%| [LD] {RESULT} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |69.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestALterResourcePool [GOOD] Test command err: Trying to start YDB, gRPC: 8375, MsgBus: 1547 2025-07-08T12:01:38.487794Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524680046565817377:2061];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:38.487822Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000dd0/r3tmp/tmp7VdgR8/pdisk_1.dat TServer::EnableGrpc on GrpcPort 8375, node 1 2025-07-08T12:01:38.566686Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-07-08T12:01:38.568041Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-07-08T12:01:38.568288Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-07-08T12:01:38.573131Z node 1 
:NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:38.573143Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:38.573145Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:38.573190Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:01:38.589423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:38.589458Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:38.591007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1547 TClient is connected to server localhost:1547 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:38.661682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:38.673213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-07-08T12:01:38.678680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9954, MsgBus: 14412 2025-07-08T12:01:39.067414Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524680051559131630:2225];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000dd0/r3tmp/tmpebBzAf/pdisk_1.dat 2025-07-08T12:01:39.071475Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:01:39.081004Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9954, node 2 2025-07-08T12:01:39.099150Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:39.099167Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:39.099169Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:39.099231Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14412 TClient is connected to server localhost:14412 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:39.172323Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:39.172360Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:39.172859Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.173382Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-07-08T12:01:39.177190Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:39.195271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14102, MsgBus: 13131 2025-07-08T12:01:39.579812Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7524680051666545122:2060];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:39.579844Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000dd0/r3tmp/tmp9wLnPY/pdisk_1.dat 2025-07-08T12:01:39.626786Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14102, node 3 2025-07-08T12:01:39.650105Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:39.650117Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:39.650119Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:39.650163Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13131 2025-07-08T12:01:39.685446Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:39.685472Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:39.685967Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13131 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:39.702108Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:39.706337Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:39.711987Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-07-08T12:01:39.715399Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterResourcePool, opId: 281474976715659:0, at schemeshard: 72057594046644480 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::RightOnlyJoinSimple [GOOD] Test command err: Trying to start YDB, gRPC: 26656, MsgBus: 19218 2025-07-08T12:01:38.235269Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524680048460539910:2136];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:38.235344Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0010db/r3tmp/tmp2GJhP5/pdisk_1.dat 2025-07-08T12:01:38.309438Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26656, node 1 2025-07-08T12:01:38.329444Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:38.329459Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:38.329461Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:38.329505Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:01:38.337257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:38.337284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:38.338814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19218 TClient is connected to server localhost:19218 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-07-08T12:01:38.398022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:38.409153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:38.471915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.533510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:38.556057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.698957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.709983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.720212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.734648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.749513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.763108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.776905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.991172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:39.090365Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7524680052755509883:2468] TxId: 281474976710673. Ctx: { TraceId: 01jzmykf6yeqf9r9bgreadsxaj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDRjNWY0ZTgtZjY1M2QwNmEtNzg3OGMzZTgtMmFiOTM0NDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Can not find default state storage group for database /Root 2025-07-08T12:01:39.096325Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976099132, txId: 281474976710672] shutting down Trying to start YDB, gRPC: 6132, MsgBus: 11142 2025-07-08T12:01:39.384553Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524680049041717201:2065];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:39.385034Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0010db/r3tmp/tmp2jleCb/pdisk_1.dat 2025-07-08T12:01:39.399731Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6132, node 2 2025-07-08T12:01:39.409905Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:39.409918Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:39.409919Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:39.409963Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11142 2025-07-08T12:01:39.488699Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:39.488733Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:39.489751Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11142 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T12:01:39.508626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.509751Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:39.517969Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.574890Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:39.593274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:39.608653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.742734Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.751347Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.763892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.778685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.792849Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.805898Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.823511Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.986356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:40.076556Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976100119, txId: 281474976715672] shutting down >> DataStreams::TestGetShardIterator [GOOD] >> DataStreams::TestGetRecordsWithoutPermission >> BasicUsage::PreferredDatabaseNoFallback [GOOD] |69.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query >> KqpSplit::AfterResultMultiRangeSegmentPartition+Ascending [GOOD] >> DataStreams::TestControlPlaneAndMeteringData >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed [GOOD] |69.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |69.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |69.9%| [TA] $(B)/ydb/core/mind/bscontroller/ut_selfheal/test-results/unittest/{meta.json ... results_accumulator.log} >> DataStreams::TestUpdateStream >> KqpScan::FullFrameWindow [GOOD] >> DataStreams::TestPutRecordsOfAnauthorizedUser >> KqpScan::GrepLimit [GOOD] >> KqpScan::EmptySet ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_export_reboots_s3/unittest >> TExportToS3WithRebootsTests::ShouldSucceedAutoDropping [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:127:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:132:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:137:2058] recipient: [1:111:2143] 2025-07-08T12:01:26.786737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:01:26.786764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:01:26.786770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:01:26.786775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:01:26.786789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:01:26.786793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:01:26.786811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:01:26.786825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, 
DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:01:26.786915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:01:26.799088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T12:01:26.799116Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:177:2058] recipient: [1:15:2062] 2025-07-08T12:01:26.805643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:01:26.805695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:01:26.805723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:01:26.809326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:01:26.809732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:01:26.809843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:26.809914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:01:26.810593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:26.810634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:01:26.810874Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:26.810883Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:26.810900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:01:26.810907Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:26.810912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:01:26.810946Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-07-08T12:01:26.823468Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T12:01:26.848978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:01:26.849055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:26.849116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:01:26.849182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:01:26.849265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:26.850914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:26.850986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:01:26.851047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:26.851059Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:01:26.851067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:01:26.851073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:01:26.851679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:26.851695Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:01:26.851700Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:01:26.852085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:26.852096Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:26.852102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:26.852108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:01:26.852692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:01:26.854441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:01:26.854488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:01:26.854709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:26.854739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:26.854747Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:26.854800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:01:26.854809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:26.854838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:01:26.854849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-07-08T12:01:26.855335Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:26.855343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:26.855386Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:26.855394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:01:26.855469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:26.855477Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:01:26.855488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:01:26.855492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:26.855496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:01:26.855500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:26.855505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:01:26.855509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TO ... 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T12:01:39.803619Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T12:01:39.803623Z node 34 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-07-08T12:01:39.803626Z node 34 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2025-07-08T12:01:39.803630Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T12:01:39.803791Z node 34 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T12:01:39.803801Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T12:01:39.803805Z node 34 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1004 2025-07-08T12:01:39.803811Z node 34 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-07-08T12:01:39.803832Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-07-08T12:01:39.803847Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 0/1, is published: true 2025-07-08T12:01:39.805444Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1004:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1004 msg type: 269090816 2025-07-08T12:01:39.805479Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1004, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1004 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1004 at step: 5000009 2025-07-08T12:01:39.805717Z node 34 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:39.805744Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1004 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 146028890221 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:39.805752Z node 34 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 1004:0, step: 5000009, at schemeshard: 72057594046678944 2025-07-08T12:01:39.805784Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 1004:0, at schemeshard: 72057594046678944 2025-07-08T12:01:39.805795Z node 34 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2025-07-08T12:01:39.805800Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 
1004 ready parts: 1/1 2025-07-08T12:01:39.805804Z node 34 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1004:0 progress is 1/1 2025-07-08T12:01:39.805808Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-07-08T12:01:39.805817Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:01:39.805827Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-07-08T12:01:39.805833Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1004, ready parts: 1/1, is published: false 2025-07-08T12:01:39.805841Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1004 ready parts: 1/1 2025-07-08T12:01:39.805845Z node 34 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1004:0 2025-07-08T12:01:39.805849Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1004:0 2025-07-08T12:01:39.805862Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-07-08T12:01:39.805868Z node 34 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1004, publications: 2, subscribers: 0 2025-07-08T12:01:39.805872Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2025-07-08T12:01:39.805875Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1004, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-07-08T12:01:39.806291Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-07-08T12:01:39.806412Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-07-08T12:01:39.806714Z node 34 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:39.806725Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:39.806761Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1004, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-07-08T12:01:39.806785Z node 34 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:39.806790Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [34:207:2209], at schemeshard: 72057594046678944, txId: 1004, path id: 1 2025-07-08T12:01:39.806794Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [34:207:2209], at schemeshard: 72057594046678944, txId: 1004, path id: 2 FAKE_COORDINATOR: Erasing txId 1004 2025-07-08T12:01:39.806958Z node 34 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T12:01:39.806970Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 1004 
2025-07-08T12:01:39.806975Z node 34 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1004 2025-07-08T12:01:39.806979Z node 34 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2025-07-08T12:01:39.806984Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-07-08T12:01:39.807141Z node 34 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T12:01:39.807152Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1004 2025-07-08T12:01:39.807156Z node 34 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1004 2025-07-08T12:01:39.807160Z node 34 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1004, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-07-08T12:01:39.807165Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-07-08T12:01:39.807178Z node 34 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1004, subscribers: 0 2025-07-08T12:01:39.807211Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-07-08T12:01:39.807216Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-07-08T12:01:39.807226Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-07-08T12:01:39.807862Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-07-08T12:01:39.807977Z node 34 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1004 2025-07-08T12:01:39.808001Z node 34 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1004, wait until txId: 1004 2025-07-08T12:01:39.808093Z node 34 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:01:39.808123Z node 34 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 37us result status StatusSuccess 2025-07-08T12:01:39.808208Z node 34 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 13 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpScan::GrepNonKeyColumns >> DataStreams::TestReservedResourcesMetering >> DataStreams::TestGetRecordsStreamWithSingleShard >> DataStreams::TestControlPlaneAndMeteringData [GOOD] >> DataStreams::TestGetRecordsWithoutPermission [GOOD] >> KqpScan::EmptySet [GOOD] >> DataStreams::TestDeleteStream >> DataStreams::ChangeBetweenRetentionModes >> DataStreams::TestGetRecordsWithCount >> IndexBuildTestReboots::CancelBuild [GOOD] >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata [GOOD] >> KqpScan::GrepNonKeyColumns [GOOD] |69.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/conveyor_composite/ut/unittest |69.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/conveyor_composite/ut/unittest >> DataStreams::TestUpdateStream [GOOD] >> DataStreams::TestStreamPagination [GOOD] >> DataStreams::Test_AutoPartitioning_Describe >> DataStreams::TestShardPagination >> DataStreams::TestDeleteStream [GOOD] >> DataStreams::TestDeleteStreamWithEnforceFlag |69.9%| [TA] $(B)/ydb/core/kqp/ut/effects/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_index_build_reboots/unittest >> IndexBuildTestReboots::CancelBuild [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:117:2058] recipient: [1:111:2143] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:127:2058] recipient: [1:109:2141] Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:132:2058] recipient: [1:110:2142] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:137:2058] recipient: [1:111:2143] 2025-07-08T12:01:33.994221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-07-08T12:01:33.994243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:01:33.994248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-07-08T12:01:33.994253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-07-08T12:01:33.994258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-07-08T12:01:33.994262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-07-08T12:01:33.994270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-07-08T12:01:33.994283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-07-08T12:01:33.994364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-07-08T12:01:34.004703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-07-08T12:01:34.004725Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:129:2154] sender: [1:177:2058] recipient: [1:15:2062] 2025-07-08T12:01:34.012370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-07-08T12:01:34.012449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-07-08T12:01:34.012475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-07-08T12:01:34.014236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-07-08T12:01:34.014385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-07-08T12:01:34.014457Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:34.014497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-07-08T12:01:34.014839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:34.014869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Stop 2025-07-08T12:01:34.015025Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:34.015030Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:34.015043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-07-08T12:01:34.015048Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:34.015052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-07-08T12:01:34.015075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:215:2058] recipient: [1:213:2214] Leader for TabletID 72057594037968897 is [1:219:2218] sender: [1:220:2058] recipient: [1:213:2214] 2025-07-08T12:01:34.016074Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:240:2058] recipient: [1:15:2062] 2025-07-08T12:01:34.031752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-07-08T12:01:34.031818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:34.031870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-07-08T12:01:34.031918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-07-08T12:01:34.031928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:34.041360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:34.041403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-07-08T12:01:34.041455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:34.041468Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-07-08T12:01:34.041487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-07-08T12:01:34.041493Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-07-08T12:01:34.042218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:34.042234Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-07-08T12:01:34.042240Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-07-08T12:01:34.045804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:34.045824Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:34.045830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:34.045839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-07-08T12:01:34.046487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-07-08T12:01:34.047093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-07-08T12:01:34.047131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:255:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-07-08T12:01:34.047317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-07-08T12:01:34.047341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:34.047348Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:34.047416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-07-08T12:01:34.047424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-07-08T12:01:34.047453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-07-08T12:01:34.047464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 
2025-07-08T12:01:34.047858Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-07-08T12:01:34.047866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-07-08T12:01:34.047905Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-07-08T12:01:34.047910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-07-08T12:01:34.047968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-07-08T12:01:34.047975Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-07-08T12:01:34.047984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:01:34.047988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:34.047992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-07-08T12:01:34.047995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-07-08T12:01:34.047999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-07-08T12:01:34.048003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TO ... -07-08T12:01:42.103359Z node 20 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CANCEL_INDEX_BUILD: DoExecute TxId: 1004 DatabaseName: "/MyRoot" IndexBuildId: 1003 2025-07-08T12:01:42.103378Z node 20 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CANCEL_INDEX_BUILD: Reply TxId: 1004 Status: PRECONDITION_FAILED Issues { message: "Index build process with id <1003> has been finished already" severity: 1 } BUILDINDEX RESPONSE CANCEL: NKikimrIndexBuilder.TEvCancelResponse TxId: 1004 Status: PRECONDITION_FAILED Issues { message: "Index build process with id <1003> has been finished already" severity: 1 } TestWaitNotification wait txId: 1004 2025-07-08T12:01:42.103424Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: send EvNotifyTxCompletion 2025-07-08T12:01:42.103430Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1004 2025-07-08T12:01:42.103487Z node 20 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1004, at schemeshard: 72057594046678944 2025-07-08T12:01:42.103498Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: got EvNotifyTxCompletionResult 2025-07-08T12:01:42.103502Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1004: satisfy waiter [20:851:2793] TestWaitNotification: OK eventTxId 1004 TestWaitNotification wait txId: 1003 2025-07-08T12:01:42.103536Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-07-08T12:01:42.103539Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-07-08T12:01:42.103570Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion index build in-flight, txId: 1003, at schemeshard: 72057594046678944 2025-07-08T12:01:42.103575Z node 20 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion, index build is ready to notify, txId: 1003, at schemeshard: 72057594046678944 
2025-07-08T12:01:42.103581Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-07-08T12:01:42.103584Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [20:854:2796] TestWaitNotification: OK eventTxId 1003 2025-07-08T12:01:42.103626Z node 20 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 1003 2025-07-08T12:01:42.103680Z node 20 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 1003 Issues { message: "TShardStatus { ShardIdx: 72057594046678944:2 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 1 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/dir/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 1003 Issues { message: "TShardStatus { ShardIdx: 72057594046678944:2 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 1 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/dir/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } 2025-07-08T12:01:42.103751Z node 20 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-07-08T12:01:42.103799Z node 20 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/dir/Table" took 52us result status StatusSuccess 2025-07-08T12:01:42.103912Z node 20 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/Table" PathDescription { Self { Name: "Table" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "index1" LocalPathId: 5 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "index" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:42.103988Z node 20 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at 
schemeshard: 72057594046678944 2025-07-08T12:01:42.104023Z node 20 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/dir/Table/index1" took 37us result status StatusSuccess 2025-07-08T12:01:42.104179Z node 20 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/Table/index1" PathDescription { Self { Name: "index1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000005 ParentPathId: 5 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "index1" LocalPathId: 5 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "index" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "index" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 
8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-07-08T12:01:42.104240Z node 20 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_FORGET_INDEX_BUILD: DoExecute TxId: 1005 DatabaseName: "/MyRoot" IndexBuildId: 1003 2025-07-08T12:01:42.104275Z node 20 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_FORGET_INDEX_BUILD: Reply TxId: 1005 Status: SUCCESS BUILDINDEX RESPONSE Forget: NKikimrIndexBuilder.TEvForgetResponse TxId: 1005 Status: SUCCESS ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata [GOOD] Test command err: Trying to start YDB, gRPC: 25524, MsgBus: 16822 2025-07-08T12:01:36.494947Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524680037744824110:2211];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:36.495007Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000e1a/r3tmp/tmpJLBBPQ/pdisk_1.dat 2025-07-08T12:01:36.557228Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25524, node 1 2025-07-08T12:01:36.573145Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:36.573158Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:36.573160Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:36.573198Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16822 2025-07-08T12:01:36.595034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:36.595061Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:36.596248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16822 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:36.626744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:36.635587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-07-08T12:01:36.654605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:36.675943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:36.688586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:36.880716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:36.889200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:36.900759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:36.914547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:36.928750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:36.943041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:36.998334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:37.157654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-07-08T12:01:37.160000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 63397, MsgBus: 16585 2025-07-08T12:01:37.489032Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524680042016190692:2063];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:37.489648Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000e1a/r3tmp/tmposnDwg/pdisk_1.dat 2025-07-08T12:01:37.507876Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63397, node 2 2025-07-08T12:01:37.521018Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:37.521032Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:37.521036Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:37.521089Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16585 TClient is connected to server localhost:16585 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:37.598208Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:37.598250Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:37.598667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:37.599241Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:37.600228Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:37.613948Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:37.634178Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:37.664419Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:37.680185Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:37.830818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:37.838483Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:37.852944Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:37.868146Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:37.881251Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:37.895881Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:37.909980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.491875Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:01:38.493446Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:2, at schemeshard: 72057594046644480 2025-07-08T12:01:38.593534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.657768Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:1, at schemeshard: 72057594046644480 2025-07-08T12:01:38.742513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.811659Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.882170Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.964197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-07-08T12:01:38.981611Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 
2025-07-08T12:01:39.305572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715703:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.309107Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715704:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9776, MsgBus: 18631 2025-07-08T12:01:39.772501Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7524680051844910628:2061];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:39.772547Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/000e1a/r3tmp/tmp7Hq3gn/pdisk_1.dat 2025-07-08T12:01:39.809239Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9776, node 3 2025-07-08T12:01:39.816668Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:39.816680Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:39.816682Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:39.816732Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18631 TClient is connected to server localhost:18631 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:39.876679Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:39.876714Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:39.877166Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.877732Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-07-08T12:01:39.880996Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:39.905708Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:39.917122Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:39.937405Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-07-08T12:01:39.950933Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:40.132626Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:40.140795Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:40.149147Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:40.163202Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:40.176595Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:40.192360Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:40.205812Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:40.774005Z node 3 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:01:40.776002Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:2, at schemeshard: 72057594046644480 2025-07-08T12:01:40.882665Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-07-08T12:01:40.959520Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:1, at schemeshard: 72057594046644480 2025-07-08T12:01:41.087950Z node 3 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-07-08T12:01:41.185542Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-07-08T12:01:41.265072Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-07-08T12:01:41.334384Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-07-08T12:01:41.349907Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-07-08T12:01:41.934405Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715715:0, at schemeshard: 72057594046644480 |69.9%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_selfheal/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::GrepNonKeyColumns [GOOD] Test command err: Trying to start YDB, gRPC: 9929, MsgBus: 4233 2025-07-08T12:01:38.989241Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524680046150531146:2077];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:38.990874Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0010d7/r3tmp/tmp22sauc/pdisk_1.dat 2025-07-08T12:01:39.069399Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9929, node 1 2025-07-08T12:01:39.089211Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:39.089225Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:39.089227Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:39.089267Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:01:39.092801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:39.092837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:39.094009Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4233 TClient is connected to server localhost:4233 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:39.154474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:39.161194Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-07-08T12:01:39.182411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:39.261917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:39.287185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-07-08T12:01:39.353108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.426394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.435910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.448931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.507346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.522843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.533918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.547666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.788555Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976099825, txId: 281474976710670] shutting down Trying to start YDB, gRPC: 12941, MsgBus: 8653 2025-07-08T12:01:40.220293Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524680054059768516:2062];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:40.220355Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0010d7/r3tmp/tmpiOFqe6/pdisk_1.dat 2025-07-08T12:01:40.242141Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12941, node 2 2025-07-08T12:01:40.249765Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:40.249777Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:40.249779Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:40.249825Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8653 TClient is connected to server localhost:8653 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:40.329198Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:40.329225Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:40.329646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:40.329878Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:40.330762Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:40.338051Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:40.361615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:40.386668Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:40.410530Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:40.588082Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:40.598279Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:40.615194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:40.624673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:40.641488Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:40.653387Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:40.667479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:40.937520Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976100980, txId: 281474976715670] shutting down Trying to start YDB, gRPC: 63637, MsgBus: 15880 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0010d7/r3tmp/tmpBCoSXE/pdisk_1.dat 2025-07-08T12:01:41.337309Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-07-08T12:01:41.338525Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63637, node 3 2025-07-08T12:01:41.349049Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:41.349062Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:41.349065Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:41.349111Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15880 2025-07-08T12:01:41.400167Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:41.400202Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:41.405438Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15880 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T12:01:41.415436Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:41.426253Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T12:01:41.444886Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:41.477189Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:41.494398Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:41.669422Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:41.681535Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:41.690686Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:41.702693Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:41.716989Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:41.733310Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:41.750880Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:41.952803Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976101988, txId: 281474976715670] shutting down ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpSplit::AfterResultMultiRangeSegmentPartition+Ascending [GOOD] Test command err: Trying to start YDB, gRPC: 4864, MsgBus: 26581 2025-07-08T12:01:38.252808Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524680046611976311:2226];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:38.252855Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0010d9/r3tmp/tmptty53c/pdisk_1.dat 2025-07-08T12:01:38.305287Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4864, node 1 2025-07-08T12:01:38.327433Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:38.327444Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:38.327445Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:38.327478Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26581 TClient is connected to server localhost:26581 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T12:01:38.378479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.381010Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-07-08T12:01:38.383895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:38.383922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:38.385057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:38.386027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:38.452374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:38.473386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:38.484999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:38.679491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.690042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.699814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.714888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.727826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.789311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.798848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.003438Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01jzmykf44f68ra4qcs869tkqr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2JmM2Q4ZjktMmZjNmFmMDItMmM0MDAxZDQtNTdjODZmMWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976710672 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2025-07-08T12:01:39.252846Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:01:39.350177Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976099048, txId: 281474976710670] shutting down Trying to start YDB, gRPC: 2866, MsgBus: 63550 2025-07-08T12:01:39.648037Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524680052581244737:2064];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:39.648062Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0010d9/r3tmp/tmpozFdmY/pdisk_1.dat 2025-07-08T12:01:39.664152Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2866, node 2 2025-07-08T12:01:39.689519Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:39.689529Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:39.689531Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:39.689575Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63550 TClient is connected to server localhost:63550 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:39.752023Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:39.752052Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:39.752399Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:39.753108Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:01:39.799388Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.856639Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:39.885754Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:39.909864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:39.995684Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:40.004975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:40.018639Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:40.029777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:40.045524Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:40.059429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:40.119544Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:40.345071Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jzmykgds47j2p3prr8tqjc34, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWM5OWJhZGEtNTMwNTVkZDctMzliMmZjNDMtZDJjYjU5YmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root captured evread ----------------------------------------------------------- starting split ----------------------------------------------------------- scheme op Status: 53 TxId: 281474976715672 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 captured evreadresult ----------------------------------------------------------- resume evread ----------------------------------------------------------- 2025-07-08T12:01:40.617428Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976100392, txId: 281474976715670] shutting down 2025-07-08T12:01:40.649381Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> DataStreams::TestPutRecordsOfAnauthorizedUser [GOOD] >> DataStreams::TestPutRecordsWithRead ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::PreferredDatabaseNoFallback [GOOD] Test command err: 2025-07-08T12:00:53.730284Z :GetAllStartPartitionSessions INFO: Random seed for debugging is 1751976053730275 2025-07-08T12:00:53.892108Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524679854947867321:2077];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:53.892310Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:00:53.896676Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524679851752662059:2158];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:53.897477Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-07-08T12:00:53.928417Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001f10/r3tmp/tmpJhfzib/pdisk_1.dat 2025-07-08T12:00:53.932967Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-07-08T12:00:53.953291Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22473, node 1 2025-07-08T12:00:53.989155Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/43nv/001f10/r3tmp/yandexls5MFt.tmp 2025-07-08T12:00:53.989171Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/43nv/001f10/r3tmp/yandexls5MFt.tmp 2025-07-08T12:00:53.992322Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:53.992351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:53.994108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:00:53.994351Z INFO: TTestServer started on Port 18380 GrpcPort 22473 TClient is connected to server localhost:18380 PQClient connected to localhost:22473 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:00:54.019126Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/43nv/001f10/r3tmp/yandexls5MFt.tmp 2025-07-08T12:00:54.019262Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:00:54.027701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-07-08T12:00:54.028207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:00:54.028238Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:00:54.029647Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-07-08T12:00:54.029965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-07-08T12:00:54.033518Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976720657, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-07-08T12:00:54.281302Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7524679859242835462:2292], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T12:00:54.281431Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmE1ZDhlOWEtMWZjYmM3YS05YTdlOWU3OS0zMmViODNhNQ==, ActorId: [1:7524679859242835459:2290], ActorState: ExecuteState, TraceId: 01jzmyj3f789pfd7479qpamyce, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T12:00:54.281870Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T12:00:54.282680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 2025-07-08T12:00:54.283323Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7524679856047629483:2263], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T12:00:54.283750Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjZlZTdlNGYtYjk0ZjQwNmEtYjI1MTg0MmYtMTQ0MmIwNzQ=, ActorId: [2:7524679856047629481:2262], ActorState: ExecuteState, TraceId: 01jzmyj3fh51tzezzwezj00s4y, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T12:00:54.283873Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T12:00:54.352475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-07-08T12:00:54.423405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:22473", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-07-08T12:00:54.522159Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720664. Ctx: { TraceId: 01jzmyj3pr0hmgvk7vkqfqze37, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTk2ZDE5NDEtZWQyMDUyOWEtOWI0MjBhNTEtYTY4MzM3YTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7524679859242835849:2878] 2025-07-08T12:00:54.894792Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:00:54.897051Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:00:58.892822Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7524679854947867321:2077];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:58.892846Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-07-08T12:00:58.896867Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7524679851752662059:2158];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:00:58.896893Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-07-08T12:00:59.575732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720676:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 5 partitions CallPersQueueGRPC request to localhost:22473 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-07-08T12:00:59.677189Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:22473 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 5 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 } } } CallPersQueueGRPC response: Status: 129 ProxyErrorCode: 53 SchemeStatus: 1 FlatTxId { TxId: 281474976720677 SchemeShardTabletId: 72057594046644480 PathId: 9 } ErrorCode: OK AddTopic: rt3.dc1--test-topic ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = test-topic, dc = dc1 2025-07-08T12:00:59.713863Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7524679880717672904:3231] connected; active server actors: 1 2025-07-08T12:00:59.713939Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] updating configuration ... FO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|89de01ea-7f582f93-75946cd7-7b963062_0 2025-07-08T12:01:09.960494Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1751976069960 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-07-08T12:01:09.960525Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|89de01ea-7f582f93-75946cd7-7b963062_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-07-08T12:01:09.960604Z :INFO: [] MessageGroupId [src] SessionId [src|89de01ea-7f582f93-75946cd7-7b963062_0] Write session: close. 
Timeout = 0 ms 2025-07-08T12:01:09.960609Z :INFO: [] MessageGroupId [src] SessionId [src|89de01ea-7f582f93-75946cd7-7b963062_0] Write session will now close 2025-07-08T12:01:09.960613Z :DEBUG: [] MessageGroupId [src] SessionId [src|89de01ea-7f582f93-75946cd7-7b963062_0] Write session: aborting 2025-07-08T12:01:09.960689Z :INFO: [] MessageGroupId [src] SessionId [src|89de01ea-7f582f93-75946cd7-7b963062_0] Write session: gracefully shut down, all writes complete 2025-07-08T12:01:09.960694Z :DEBUG: [] MessageGroupId [src] SessionId [src|89de01ea-7f582f93-75946cd7-7b963062_0] Write session: destroy 2025-07-08T12:01:09.961044Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|89de01ea-7f582f93-75946cd7-7b963062_0 grpc read done: success: 0 data: 2025-07-08T12:01:09.961051Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|89de01ea-7f582f93-75946cd7-7b963062_0 grpc read failed 2025-07-08T12:01:09.961056Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|89de01ea-7f582f93-75946cd7-7b963062_0 grpc closed 2025-07-08T12:01:09.961060Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|89de01ea-7f582f93-75946cd7-7b963062_0 is DEAD 2025-07-08T12:01:09.961196Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-07-08T12:01:09.961610Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7524679923794033763:2444] destroyed 2025-07-08T12:01:09.961627Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. ====TYdbPqTestRetryPolicy() ====ExpectBreakDown === Session was created, waiting for retries >>> Ready to answer: ok ====CreateRetryState ====CreateRetryState Initialized Test retry state: get retry delay 2025-07-08T12:01:10.069547Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-07-08T12:01:12.070540Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay === In the next federation discovery response dc2 will be available 2025-07-08T12:01:14.071136Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-07-08T12:01:16.073024Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2025-07-08T12:01:17.801425Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-07-08T12:01:17.801455Z node 3 :IMPORT WARN: Table profiles were not loaded Test retry state: get retry delay 2025-07-08T12:01:18.077037Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-07-08T12:01:20.082736Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-07-08T12:01:22.083182Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-07-08T12:01:24.084199Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-07-08T12:01:26.086641Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-07-08T12:01:28.087800Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-07-08T12:01:30.088170Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-07-08T12:01:32.089156Z :NOTICE: [/Root] [] 
[] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-07-08T12:01:34.090138Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-07-08T12:01:36.097089Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-07-08T12:01:38.098131Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2025-07-08T12:01:38.844353Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] TPersQueueReadBalancer::HandleWakeup 2025-07-08T12:01:38.844393Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPersQueue::TEvStatus TabletId: 72075186224037892 Cookie: 1 2025-07-08T12:01:38.844730Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } 2025-07-08T12:01:38.845470Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPeriodicTopicStats PathId: 9 Generation: 1 StatsReportRound: 1 DataSize: 0 UsedReserveSize: 0 2025-07-08T12:01:38.845615Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] ProcessPendingStats. PendingUpdates size 1 === Waiting for repair >>> Ready to answer: ok 2025-07-08T12:01:40.105103Z :INFO: [/Root] [] [] Start federated write session to database 'dc2' (previous was ) FederationState: { Status: SUCCESS SelfLocation: "fancy_datacenter" DbInfos: [ { name: "dc1" path: "/Root" id: "account-dc1" endpoint: "localhost:31718" location: "dc1" status: AVAILABLE weight: 1000 } { name: "dc2" path: "/Root" id: "account-dc2" endpoint: "localhost:31718" location: "dc2" status: AVAILABLE weight: 500 } { name: "dc3" path: "/Root" id: "account-dc3" endpoint: "localhost:31718" location: "dc3" status: AVAILABLE weight: 500 } ] } === Closing the session 2025-07-08T12:01:40.113180Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: try to update token 2025-07-08T12:01:40.113450Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Start write session. 
Will connect to nodeId: 0 2025-07-08T12:01:40.115751Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: write to message_group: src_id 2025-07-08T12:01:40.115869Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: send init request: init_request { path: "test-topic" message_group_id: "src_id" } 2025-07-08T12:01:40.116051Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: OnWriteDone gRpcStatusCode: 0 2025-07-08T12:01:40.116044Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-07-08T12:01:40.116062Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2025-07-08T12:01:40.116238Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { path: "test-topic" message_group_id: "src_id" } 2025-07-08T12:01:40.116282Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 path: "test-topic" message_group_id: "src_id" from ipv6:[::1]:39836 2025-07-08T12:01:40.116287Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="topic server" ip=ipv6:[::1]:39836 proto=topic topic=test-topic durationSec=0 2025-07-08T12:01:40.116290Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-07-08T12:01:40.118389Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: close. Timeout 0.000000s 2025-07-08T12:01:40.118402Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session will now close 2025-07-08T12:01:40.118422Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: aborting 2025-07-08T12:01:40.118584Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: gracefully shut down, all writes complete 2025-07-08T12:01:40.118607Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: destroy 2025-07-08T12:01:40.119169Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2025-07-08T12:01:40.119223Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-07-08T12:01:40.119224Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-07-08T12:01:40.119226Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-07-08T12:01:40.119231Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524680056938021747:2772] (SourceId=src_id, PreferedPartition=(NULL)) StartKqpSession 2025-07-08T12:01:40.119696Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7524680056938021747:2772] (SourceId=src_id, 
PreferedPartition=(NULL)) Select from the table 2025-07-08T12:01:40.119965Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: grpc closed 2025-07-08T12:01:40.119971Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: is DEAD 2025-07-08T12:01:40.294793Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7524680056938021788:2776] TxId: 281474976720745. Ctx: { TraceId: 01jzmykgcwex0mpt7shttdjr8h, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDA1YTZmOTAtOWVlOTE3NTktZGY2YWUxNy1iNTY4MGM0OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 4 2025-07-08T12:01:40.295207Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7524680056938021796:2783], TxId: 281474976720745, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jzmykgcwex0mpt7shttdjr8h. SessionId : ydb://session/3?node_id=3&id=ZDA1YTZmOTAtOWVlOTE3NTktZGY2YWUxNy1iNTY4MGM0OQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : . Database : /Root. }. Handle abort execution event from: [3:7524680056938021788:2776], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-07-08T12:01:40.295229Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7524680056938021798:2784], TxId: 281474976720745, task: 4. Ctx: { TraceId : 01jzmykgcwex0mpt7shttdjr8h. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=ZDA1YTZmOTAtOWVlOTE3NTktZGY2YWUxNy1iNTY4MGM0OQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : . }. Handle abort execution event from: [3:7524680056938021788:2776], status: UNAVAILABLE, reason: {
: Error: Terminate execution } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed [GOOD] Test command err: 2025-07-08T12:01:39.204718Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524680049189977826:2063];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:39.204739Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001b31/r3tmp/tmpzweZIS/pdisk_1.dat 2025-07-08T12:01:39.271996Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25375, node 1 2025-07-08T12:01:39.284761Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/43nv/001b31/r3tmp/yandexPcb2Ij.tmp 2025-07-08T12:01:39.284774Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/43nv/001b31/r3tmp/yandexPcb2Ij.tmp 2025-07-08T12:01:39.284828Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/43nv/001b31/r3tmp/yandexPcb2Ij.tmp 2025-07-08T12:01:39.284869Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15109 PQClient connected to localhost:25375 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-07-08T12:01:39.307327Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:39.307356Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:39.308399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-07-08T12:01:39.315381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-07-08T12:01:39.576061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.576327Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7524680049189978474:2288], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-07-08T12:01:39.576798Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDQzOTMyNDAtZmFiNjYxODItMzNjNmQ3MTktY2ZmZTgwOWI=, ActorId: [1:7524680049189978471:2286], ActorState: ExecuteState, TraceId: 01jzmykfph6kcpfm44z13xd69m, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-07-08T12:01:39.577211Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-07-08T12:01:39.611126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.643958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-07-08T12:01:39.739745Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jzmykfw446a8m70753bzamfp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzM0NGRhMGEtMzA2ZmMzNmMtN2E4MTMxNjAtMTAyMmRmMTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Database not set, use /Root 2025-07-08T12:01:40.206698Z node 1 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/scan/unittest >> KqpScan::EmptySet [GOOD] Test command err: Trying to start YDB, gRPC: 26378, MsgBus: 24988 2025-07-08T12:01:38.198345Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524680047881381056:2184];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:38.198621Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0010df/r3tmp/tmpMwgB6p/pdisk_1.dat 2025-07-08T12:01:38.258805Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26378, node 1 2025-07-08T12:01:38.277144Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:38.277158Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:38.277160Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:38.277201Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24988 2025-07-08T12:01:38.330392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:38.330425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:38.331369Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24988 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:38.345834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:38.348792Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:38.395428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:38.463139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:38.483039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:38.494964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:38.588682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.607850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.626754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.636748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.695715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.709040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.724050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:38.913094Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7524680047881383415:2446] TxId: 281474976715671. Ctx: { TraceId: 01jzmykf242hm2k5ywmw3fbknk, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDYzOTNmYTctODUwNzE3NWMtOTk4MWJiYjgtZTc0ZDhhMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Can not find default state storage group for database /Root 2025-07-08T12:01:38.917011Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976098957, txId: 281474976715670] shutting down Trying to start YDB, gRPC: 7951, MsgBus: 13363 2025-07-08T12:01:39.106126Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7524680048698104306:2061];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:39.106158Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0010df/r3tmp/tmpmhx1dT/pdisk_1.dat 2025-07-08T12:01:39.118510Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7951, node 2 2025-07-08T12:01:39.127718Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:39.127730Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:39.127731Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:39.127773Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13363 TClient is connected to server localhost:13363 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:39.207374Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:39.207406Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:39.207841Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:39.209343Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:39.222610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:39.279200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:39.301103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:39.365983Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:39.456720Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.466439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.476802Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.491804Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.506494Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.520546Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:39.535192Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:40.862283Z node 2 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:01:40.888129Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976099839, txId: 281474976715670] shutting down Trying to start YDB, gRPC: 22725, MsgBus: 4003 2025-07-08T12:01:41.121829Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7524680060807880150:2072];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:41.122492Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/0010df/r3tmp/tmpeGyNxD/pdisk_1.dat 2025-07-08T12:01:41.148100Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22725, node 3 2025-07-08T12:01:41.197549Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:41.197561Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:41.197564Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:41.197612Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:4003 2025-07-08T12:01:41.229056Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:41.229091Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:41.229790Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4003 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:41.264544Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:41.266148Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:41.269258Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:41.286532Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:41.325686Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:41.349058Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-07-08T12:01:41.533307Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-07-08T12:01:41.547176Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-07-08T12:01:41.561806Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-07-08T12:01:41.570816Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-07-08T12:01:41.584232Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-07-08T12:01:41.601094Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-07-08T12:01:41.619153Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-07-08T12:01:41.828806Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1751976101827, txId: 281474976715670] shutting down >> DataStreams::ChangeBetweenRetentionModes [GOOD] >> DataStreams::TestCreateExistingStream >> DataStreams::TestShardPagination [GOOD] >> TBsProxyFaultToleranceTest::CheckTPutFaultToleranceTestErasureMirror3dc [GOOD] |69.9%| [TA] $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ... results_accumulator.log} |69.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/effects/test-results/unittest/{meta.json ... 
results_accumulator.log} |69.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/conveyor_composite/ut/unittest |69.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut >> DataStreams::TestGetRecordsStreamWithSingleShard [GOOD] >> DataStreams::TestGetRecords1MBMessagesOneByOneByTS >> DataStreams::TestDeleteStreamWithEnforceFlag [GOOD] |69.9%| [LD] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |70.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut >> DataStreams::TestDeleteStreamWithEnforceFlagFalse |70.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/dsproxy/ut_ftol/unittest >> TBsProxyFaultToleranceTest::CheckTPutFaultToleranceTestErasureMirror3dc [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/datastreams/ut/unittest >> DataStreams::TestShardPagination [GOOD] Test command err: 2025-07-08T12:01:39.402088Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7524680052403027031:2143];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:39.402172Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001544/r3tmp/tmpK0c5UP/pdisk_1.dat 2025-07-08T12:01:39.477641Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25259, node 1 2025-07-08T12:01:39.497188Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:39.497201Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:39.497203Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:39.497244Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-07-08T12:01:39.509115Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:39.509144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:39.512678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18695 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-07-08T12:01:39.538294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:39.595118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:18695 2025-07-08T12:01:39.619175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:39.624779Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480
: Error: retention hours and storage megabytes must fit one of: { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, provided values: hours 168, storage 40960, code: 500080 2025-07-08T12:01:39.717945Z node 1 :TX_PROXY ERROR: Actor# [1:7524680052403028938:3373] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/stream_TestStreamStorageRetention\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypePersQueueGroup, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp:345" severity: 1 } 2025-07-08T12:01:40.452078Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7524680054437712035:2248];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:40.456310Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001544/r3tmp/tmpAbBjS2/pdisk_1.dat 2025-07-08T12:01:40.476282Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18527, node 4 2025-07-08T12:01:40.513236Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:40.513251Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:40.513254Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:40.513313Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29396 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-07-08T12:01:40.558872Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:40.558914Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:40.560050Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-07-08T12:01:40.560916Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 
2025-07-08T12:01:40.564820Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-07-08T12:01:40.592678Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:29396 2025-07-08T12:01:40.621669Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:40.632284Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-07-08T12:01:41.450726Z node 4 :TX_CONVEYOR ERROR: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-07-08T12:01:42.694514Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7524680062587674068:2143];send_to=[0:7307199536658146131:7762515]; 2025-07-08T12:01:42.694730Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/43nv/001544/r3tmp/tmpnMaBhv/pdisk_1.dat 2025-07-08T12:01:42.726913Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2695, node 7 2025-07-08T12:01:42.748820Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-07-08T12:01:42.748835Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-07-08T12:01:42.748837Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-07-08T12:01:42.748899Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8728 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-07-08T12:01:42.797487Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-07-08T12:01:42.797529Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-07-08T12:01:42.798520Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-07-08T12:01:42.800092Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-07-08T12:01:42.819123Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:8728 2025-07-08T12:01:42.849654Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... |70.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots >> BasicUsage::SimpleHandlers [GOOD] >> DataStreams::TestCreateExistingStream [GOOD] >> DataStreams::ListStreamsValidation |70.0%| [TA] $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} |70.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/conveyor_composite/ut/unittest |70.0%| [TA] $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... results_accumulator.log} |70.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/conveyor_composite/ut/unittest |70.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |70.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots >> DataStreams::TestPutRecordsWithRead [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> CompositeConveyorTests::Test10xMultiDistribution >> DataStreams::TestPutRecordsCornerCases >> DataStreams::TestDeleteStreamWithEnforceFlagFalse [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond |70.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/conveyor_composite/ut/unittest |70.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/conveyor_composite/ut/unittest |70.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/test-results/unittest/{meta.json ... 
results_accumulator.log} |70.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/conveyor_composite/ut/unittest |70.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/conveyor_composite/ut/unittest |70.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/conveyor_composite/ut/unittest >> DataStreams::ListStreamsValidation [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive ------- [LD] {default-linux-x86_64, relwithdebinfo, FAILED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut command /home/runner/.ya/tools/v4/2989598506/python /home/runner/actions_runner/_work/ydb/ydb/build/scripts/link_exe.py --start-plugins --end-plugins --clang-ver 18 --source-root /home/runner/actions_runner/_work/ydb/ydb --build-root /home/runner/.ya/build/build_root/43nv/001237 --arch=LINUX --objcopy-exe /home/runner/.ya/tools/v4/7599469156/bin/llvm-objcopy /home/runner/.ya/tools/v4/7599469156/bin/clang++ -Wl,--whole-archive @/home/runner/.ya/build/build_root/43nv/001237/ya_command_file_0.args -Wl,--no-whole-archive /home/runner/.ya/build/build_root/43nv/001237/ydb/core/kqp/runtime/ut/__vcs_version__.c.o /home/runner/.ya/build/build_root/43nv/001237/ydb/core/kqp/runtime/ut/__/kqp_scan_data_ut.cpp.o /home/runner/.ya/build/build_root/43nv/001237/ydb/core/kqp/runtime/ut/__/kqp_compute_scheduler_ut.cpp.o /home/runner/.ya/build/build_root/43nv/001237/ydb/core/kqp/runtime/ut/__/kqp_scan_fetcher_ut.cpp.o -o /home/runner/.ya/build/build_root/43nv/001237/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut --target=x86_64-linux-gnu --sysroot=/home/runner/.ya/tools/v4/243881345 -B/home/runner/.ya/tools/v4/243881345/usr/bin -Wl,--start-group contrib/libs/cxxsupp/libcxxabi-parts/liblibs-cxxsupp-libcxxabi-parts.a contrib/libs/libunwind/libcontrib-libs-libunwind.a contrib/libs/cxxsupp/libcxxrt/liblibs-cxxsupp-libcxxrt.a contrib/libs/cxxsupp/builtins/liblibs-cxxsupp-builtins.a contrib/libs/cxxsupp/libcxx/liblibs-cxxsupp-libcxx.a util/charset/libutil-charset.a contrib/libs/zlib/libcontrib-libs-zlib.a contrib/libs/double-conversion/libcontrib-libs-double-conversion.a contrib/libs/libc_compat/libcontrib-libs-libc_compat.a util/libyutil.a library/cpp/malloc/api/libcpp-malloc-api.a contrib/restricted/abseil-cpp/absl/base/libabseil-cpp-absl-base.a contrib/restricted/abseil-cpp/absl/debugging/libabseil-cpp-absl-debugging.a contrib/restricted/abseil-cpp/absl/numeric/libabseil-cpp-absl-numeric.a contrib/restricted/abseil-cpp/absl/strings/libabseil-cpp-absl-strings.a contrib/restricted/abseil-cpp/absl/types/libabseil-cpp-absl-types.a contrib/restricted/abseil-cpp/absl/hash/libabseil-cpp-absl-hash.a contrib/restricted/abseil-cpp/absl/profiling/libabseil-cpp-absl-profiling.a contrib/restricted/abseil-cpp/absl/time/libabseil-cpp-absl-time.a contrib/restricted/abseil-cpp/absl/synchronization/libabseil-cpp-absl-synchronization.a contrib/restricted/abseil-cpp/absl/container/libabseil-cpp-absl-container.a contrib/restricted/abseil-cpp/absl/flags/libabseil-cpp-absl-flags.a contrib/restricted/abseil-cpp/absl/log/libabseil-cpp-absl-log.a contrib/restricted/abseil-cpp/absl/random/libabseil-cpp-absl-random.a contrib/restricted/abseil-cpp/absl/status/libabseil-cpp-absl-status.a contrib/libs/tcmalloc/malloc_extension/liblibs-tcmalloc-malloc_extension.a library/cpp/malloc/tcmalloc/libcpp-malloc-tcmalloc.a 
contrib/libs/tcmalloc/no_percpu_cache/liblibs-tcmalloc-no_percpu_cache.a library/cpp/colorizer/liblibrary-cpp-colorizer.a library/cpp/dbg_output/liblibrary-cpp-dbg_output.a library/cpp/containers/paged_vector/libcpp-containers-paged_vector.a library/cpp/lcs/liblibrary-cpp-lcs.a library/cpp/containers/stack_array/libcpp-containers-stack_array.a library/cpp/diff/liblibrary-cpp-diff.a library/cpp/json/common/libcpp-json-common.a library/cpp/json/fast_sax/libcpp-json-fast_sax.a tools/enum_parser/enum_serialization_runtime/libtools-enum_parser-enum_serialization_runtime.a library/cpp/json/writer/libcpp-json-writer.a library/cpp/string_utils/relaxed_escaper/libcpp-string_utils-relaxed_escaper.a library/cpp/json/liblibrary-cpp-json.a library/cpp/testing/common/libcpp-testing-common.a library/cpp/testing/hook/libcpp-testing-hook.a library/cpp/testing/unittest/libcpp-testing-unittest.a library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.a library/cpp/testing/unittest_main/libcpp-testing-unittest_main.a contrib/restricted/abseil-cpp-tstring/y_absl/base/libabseil-cpp-tstring-y_absl-base.a contrib/restricted/abseil-cpp-tstring/y_absl/debugging/libabseil-cpp-tstring-y_absl-debugging.a contrib/restricted/abseil-cpp-tstring/y_absl/numeric/libabseil-cpp-tstring-y_absl-numeric.a contrib/restricted/abseil-cpp-tstring/y_absl/strings/libabseil-cpp-tstring-y_absl-strings.a contrib/restricted/abseil-cpp-tstring/y_absl/types/libabseil-cpp-tstring-y_absl-types.a contrib/restricted/abseil-cpp-tstring/y_absl/hash/libabseil-cpp-tstring-y_absl-hash.a contrib/restricted/abseil-cpp-tstring/y_absl/profiling/libabseil-cpp-tstring-y_absl-profiling.a contrib/restricted/abseil-cpp-tstring/y_absl/time/libabseil-cpp-tstring-y_absl-time.a contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/libabseil-cpp-tstring-y_absl-synchronization.a contrib/restricted/abseil-cpp-tstring/y_absl/container/libabseil-cpp-tstring-y_absl-container.a contrib/restricted/abseil-cpp-tstring/y_absl/flags/libabseil-cpp-tstring-y_absl-flags.a contrib/restricted/abseil-cpp-tstring/y_absl/log/libabseil-cpp-tstring-y_absl-log.a contrib/restricted/abseil-cpp-tstring/y_absl/status/libabseil-cpp-tstring-y_absl-status.a contrib/libs/protobuf/libcontrib-libs-protobuf.a contrib/libs/apache/orc-format/liblibs-apache-orc-format.a contrib/libs/xxhash/libcontrib-libs-xxhash.a contrib/libs/lz4/libcontrib-libs-lz4.a contrib/libs/snappy/libcontrib-libs-snappy.a contrib/libs/zstd/libcontrib-libs-zstd.a contrib/libs/apache/orc/liblibs-apache-orc.a contrib/libs/brotli/common/liblibs-brotli-common.a contrib/libs/brotli/dec/liblibs-brotli-dec.a contrib/libs/brotli/enc/liblibs-brotli-enc.a contrib/libs/re2/libcontrib-libs-re2.a contrib/libs/utf8proc/libcontrib-libs-utf8proc.a contrib/libs/libevent/event_core/liblibs-libevent-event_core.a contrib/libs/libevent/event_extra/liblibs-libevent-event_extra.a contrib/libs/openssl/libcontrib-libs-openssl.a contrib/libs/libevent/event_openssl/liblibs-libevent-event_openssl.a contrib/libs/libevent/event_thread/liblibs-libevent-event_thread.a contrib/restricted/boost/container/librestricted-boost-container.a contrib/libs/icu/libcontrib-libs-icu.a contrib/restricted/boost/atomic/librestricted-boost-atomic.a contrib/restricted/boost/chrono/librestricted-boost-chrono.a contrib/restricted/boost/exception/librestricted-boost-exception.a contrib/restricted/boost/regex/librestricted-boost-regex.a contrib/restricted/boost/thread/librestricted-boost-thread.a contrib/restricted/boost/locale/librestricted-boost-locale.a 
contrib/restricted/boost/random/librestricted-boost-random.a contrib/restricted/thrift/libcontrib-restricted-thrift.a contrib/restricted/uriparser/libcontrib-restricted-uriparser.a contrib/libs/flatbuffers/libcontrib-libs-flatbuffers.a contrib/libs/apache/arrow/liblibs-apache-arrow.a library/cpp/threading/hot_swap/libcpp-threading-hot_swap.a library/cpp/containers/absl_flat_hash/libcpp-containers-absl_flat_hash.a ydb/library/actors/util/liblibrary-actors-util.a library/cpp/charset/lite/libcpp-charset-lite.a contrib/libs/libiconv/static/liblibs-libiconv-static.a library/cpp/charset/liblibrary-cpp-charset.a library/cpp/containers/str_map/libcpp-containers-str_map.a library/cpp/containers/atomizer/libcpp-containers-atomizer.a ydb/library/actors/prof/liblibrary-actors-prof.a ydb/library/actors/actor_type/liblibrary-actors-actor_type.a ydb/library/actors/protos/liblibrary-actors-protos.a library/cpp/blockcodecs/core/libcpp-blockcodecs-core.a library/cpp/resource/liblibrary-cpp-resource.a certs/libcerts.a contrib/libs/c-ares/libcontrib-libs-c-ares.a contrib/libs/grpc/third_party/address_sorting/libgrpc-third_party-address_sorting.a contrib/libs/grpc/third_party/upb/libgrpc-third_party-upb.a contrib/restricted/abseil-cpp-tstring/y_absl/random/libabseil-cpp-tstring-y_absl-random.a contrib/libs/grpc/libcontrib-libs-grpc.a ydb/library/services/libydb-library-services.a library/cpp/logger/liblibrary-cpp-logger.a library/cpp/lwtrace/protos/libcpp-lwtrace-protos.a library/cpp/lwtrace/liblibrary-cpp-lwtrace.a library/cpp/containers/stack_vector/libcpp-containers-stack_vector.a library/cpp/monlib/metrics/libcpp-monlib-metrics.a library/cpp/monlib/encode/libcpp-monlib-encode.a library/cpp/monlib/encode/buffered/libmonlib-encode-buffered.a library/cpp/monlib/exception/libcpp-monlib-exception.a library/cpp/monlib/encode/json/libmonlib-encode-json.a library/cpp/monlib/encode/spack/libmonlib-encode-spack.a library/cpp/monlib/encode/prometheus/libmonlib-encode-prometheus.a contrib/libs/base64/avx2/liblibs-base64-avx2.a contrib/libs/base64/ssse3/liblibs-base64-ssse3.a contrib/libs/base64/neon32/liblibs-base64-neon32.a contrib/libs/base64/neon64/liblibs-base64-neon64.a contrib/libs/base64/plain32/liblibs-base64-plain32.a contrib/libs/base64/plain64/liblibs-base64-plain64.a library/cpp/string_utils/base64/libcpp-string_utils-base64.a library/cpp/build_info/liblibrary-cpp-build_info.a library/cpp/svnversion/liblibrary-cpp-svnversion.a library/cpp/containers/intrusive_rb_tree/libcpp-containers-intrusive_rb_tree.a library/cpp/coroutine/engine/libcpp-coroutine-engine.a library/cpp/coroutine/listener/libcpp-coroutine-listener.a contrib/libs/nayuki_md5/libcontrib-libs-nayuki_md5.a library/cpp/digest/md5/libcpp-digest-md5.a library/cpp/digest/murmur/libcpp-digest-murmur.a library/cpp/case_insensitive_string/liblibrary-cpp-case_insensitive_string.a library/cpp/iterator/liblibrary-cpp-iterator.a library/cpp/string_utils/quote/libcpp-string_utils-quote.a library/cpp/string_utils/scan/libcpp-string_utils-scan.a library/cpp/cgiparam/liblibrary-cpp-cgiparam.a library/cpp/digest/lower_case/libcpp-digest-lower_case.a library/cpp/http/misc/libcpp-http-misc.a library/cpp/mime/types/libcpp-mime-types.a contrib/libs/libidn/static/liblibs-libidn-static.a library/cpp/uri/liblibrary-cpp-uri.a library/cpp/http/fetch/libcpp-http-fetch.a contrib/libs/libbz2/libcontrib-libs-libbz2.a contrib/libs/fastlz/libcontrib-libs-fastlz.a contrib/libs/zstd06/libcontrib-libs-zstd06.a contrib/libs/lzmasdk/libcontrib-libs-lzmasdk.a 
library/cpp/blockcodecs/liblibrary-cpp-blockcodecs.a library/cpp/streams/brotli/libcpp-streams-brotli.a library/cpp/streams/bzip2/libcpp-streams-bzip2.a library/cpp/streams/lzma/libcpp-streams-lzma.a library/cpp/http/io/libcpp-http-io.a library/cpp/threading/equeue/libcpp-threading-equeue.a library/cpp/http/server/libcpp-http-server.a library/cpp/monlib/service/libcpp-monlib-service.a library/cpp/monlib/encode/text/libmonlib-encode-text.a library/cpp/monlib/service/pages/libmonlib-service-pages.a library/cpp/threading/light_rw_lock/libcpp-threading-light_rw_lock.a library/cpp/monlib/dynamic_counters/libcpp-monlib-dynamic_counters.a library/cpp/time_provider/liblibrary-cpp-time_provider.a ydb/library/actors/core/harmonizer/libactors-core-harmonizer.a library/cpp/threading/queue/libcpp-threading-queue.a contrib/libs/linuxvdso/original/liblibs-linuxvdso-original.a contrib/libs/linuxvdso/libcontrib-libs-linuxvdso.a ydb/library/actors/memory_log/liblibrary-actors-memory_log.a library/cpp/execprofile/liblibrary-cpp-execprofile.a library/cpp/threading/future/libcpp-threading-future.a ydb/library/actors/core/liblibrary-actors-core.a ydb/library/actors/dnscachelib/liblibrary-actors-dnscachelib.a contrib/libs/crcutil/libcontrib-libs-crcutil.a library/cpp/digest/crc32c/libcpp-digest-crc32c.a library/cpp/html/pcdata/libcpp-html-pcdata.a library/cpp/lwtrace/mon/analytics/liblwtrace-mon-analytics.a library/cpp/lwtrace/mon/libcpp-lwtrace-mon.a library/cpp/monlib/encode/legacy_protobuf/protos/libencode-legacy_protobuf-protos.a library/cpp/messagebus/monitoring/libcpp-messagebus-monitoring.a library/cpp/monlib/service/pages/resources/libservice-pages-resources.a library/cpp/monlib/service/pages/tablesorter/libservice-pages-tablesorter.a library/cpp/streams/zc_memory_input/libcpp-streams-zc_memory_input.a library/cpp/packedtypes/liblibrary-cpp-packedtypes.a library/cpp/sliding_window/liblibrary-cpp-sliding_window.a ydb/library/actors/helpers/liblibrary-actors-helpers.a ydb/library/actors/dnsresolver/liblibrary-actors-dnsresolver.a contrib/libs/opentelemetry-proto/libcontrib-libs-opentelemetry-proto.a ydb/library/actors/wilson/liblibrary-actors-wilson.a library/cpp/openssl/init/libcpp-openssl-init.a ydb/library/actors/interconnect/liblibrary-actors-interconnect.a library/cpp/containers/2d_array/libcpp-containers-2d_array.a library/cpp/binsaver/liblibrary-cpp-binsaver.a library/cpp/protobuf/util/proto/libprotobuf-util-proto.a library/cpp/protobuf/util/libcpp-protobuf-util.a ydb/library/aclib/protos/liblibrary-aclib-protos.a ydb/library/aclib/libydb-library-aclib.a library/cpp/deprecated/enum_codegen/libcpp-deprecated-enum_codegen.a library/cpp/sse/liblibrary-cpp-sse.a library/cpp/dot_product/liblibrary-cpp-dot_product.a library/cpp/l2_distance/liblibrary-cpp-l2_distance.a library/cpp/random_provider/liblibrary-cpp-random_provider.a ydb/core/config/protos/libcore-config-protos.a ydb/library/folder_service/proto/liblibrary-folder_service-proto.a ydb/public/api/protos/annotations/libapi-protos-annotations.a ydb/public/api/protos/libapi-protos.a yql/essentials/public/issue/protos/libpublic-issue-protos.a yql/essentials/core/issue/protos/libcore-issue-protos.a ydb/library/yql/dq/proto/libyql-dq-proto.a yql/essentials/public/types/libessentials-public-types.a ydb/library/yql/dq/actors/protos/libdq-actors-protos.a yql/essentials/protos/libyql-essentials-protos.a yql/essentials/providers/common/proto/libproviders-common-proto.a ydb/library/yql/providers/generic/connector/api/service/protos/libapi-service-protos.a 
ydb/library/yql/providers/s3/proto/libproviders-s3-proto.a ydb/core/fq/libs/config/protos/liblibs-config-protos.a ydb/core/protos/schemeshard/libcore-protos-schemeshard.a ydb/core/scheme/protos/libcore-scheme-protos.a ydb/core/tx/columnshard/common/protos/libcolumnshard-common-protos.a ydb/library/formats/arrow/protos/liblibrary-formats-arrow-protos.a ydb/core/tx/columnshard/engines/protos/libcolumnshard-engines-protos.a ydb/core/tx/columnshard/engines/scheme/defaults/protos/libscheme-defaults-protos.a ydb/library/login/protos/liblibrary-login-protos.a ydb/library/mkql_proto/protos/liblibrary-mkql_proto-protos.a ydb/library/ydb_issue/proto/liblibrary-ydb_issue-proto.a yql/essentials/core/file_storage/proto/libcore-file_storage-proto.a ydb/core/protos/libydb-core-protos.a ydb/core/base/generated/libcore-base-generated.a ydb/core/debug/libydb-core-debug.a library/cpp/digest/old_crc/libcpp-digest-old_crc.a ydb/core/erasure/libydb-core-erasure.a ydb/core/graph/protos/libcore-graph-protos.a ydb/core/jaeger_tracing/libydb-core-jaeger_tracing.a ydb/core/protos/out/libcore-protos-out.a library/cpp/threading/poor_man_openmp/libcpp-threading-poor_man_openmp.a library/cpp/digest/argonish/internal/proxies/avx2/libinternal-proxies-avx2.a library/cpp/digest/argonish/internal/proxies/ref/libinternal-proxies-ref.a library/cpp/digest/argonish/internal/proxies/sse2/libinternal-proxies-sse2.a library/cpp/digest/argonish/internal/proxies/sse41/libinternal-proxies-sse41.a library/cpp/digest/argonish/internal/proxies/ssse3/libinternal-proxies-ssse3.a library/cpp/digest/argonish/libcpp-digest-argonish.a ydb/library/login/password_checker/liblibrary-login-password_checker.a ydb/library/login/account_lockout/liblibrary-login-account_lockout.a ydb/library/login/libydb-library-login.a contrib/libs/libaio/static/liblibs-libaio-static.a contrib/libs/liburing/libcontrib-libs-liburing.a ydb/library/pdisk_io/protos/liblibrary-pdisk_io-protos.a ydb/library/pdisk_io/libydb-library-pdisk_io.a ydb/library/pretty_types_print/protobuf/liblibrary-pretty_types_print-protobuf.a yql/essentials/utils/libyql-essentials-utils.a yql/essentials/public/issue/libessentials-public-issue.a ydb/library/yql/public/ydb_issue/libyql-public-ydb_issue.a ydb/library/ydb_issue/libydb-library-ydb_issue.a ydb/public/api/protos/out/libapi-protos-out.a contrib/libs/cctz/libcontrib-libs-cctz.a library/cpp/enumbitset/liblibrary-cpp-enumbitset.a library/cpp/yt/assert/libcpp-yt-assert.a library/cpp/yt/exception/libcpp-yt-exception.a library/cpp/yt/misc/libcpp-yt-misc.a library/cpp/yt/malloc/libcpp-yt-malloc.a library/cpp/yt/string/libcpp-yt-string.a library/cpp/yt/system/libcpp-yt-system.a library/cpp/yt/memory/libcpp-yt-memory.a library/cpp/yt/yson_string/libcpp-yt-yson_string.a library/cpp/yt/yson/libcpp-yt-yson.a library/cpp/yson/liblibrary-cpp-yson.a yql/essentials/core/pg_settings/libessentials-core-pg_settings.a yql/essentials/core/sql_types/libessentials-core-sql_types.a yql/essentials/core/issue/libessentials-core-issue.a library/cpp/yson_pull/libyson_pull.a yql/essentials/public/decimal/libessentials-public-decimal.a yql/essentials/public/udf/libessentials-public-udf.a yql/essentials/minikql/dom/libessentials-minikql-dom.a yql/essentials/parser/pg_catalog/proto/libparser-pg_catalog-proto.a library/cpp/logger/global/libcpp-logger-global.a yql/essentials/utils/log/proto/libutils-log-proto.a contrib/libs/backtrace/libcontrib-libs-backtrace.a yql/essentials/utils/backtrace/libessentials-utils-backtrace.a 
yql/essentials/utils/log/libessentials-utils-log.a yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.a library/cpp/containers/sorted_vector/libcpp-containers-sorted_vector.a util/draft/libutil-draft.a library/cpp/string_utils/levenshtein_diff/libcpp-string_utils-levenshtein_diff.a library/cpp/yson/json/libcpp-yson-json.a library/cpp/yson/node/libcpp-yson-node.a library/cpp/openssl/holders/libcpp-openssl-holders.a library/cpp/openssl/method/libcpp-openssl-method.a library/cpp/openssl/io/libcpp-openssl-io.a yql/essentials/utils/fetch/libessentials-utils-fetch.a yql/essentials/core/credentials/libessentials-core-credentials.a yql/essentials/core/url_preprocessing/interface/libcore-url_preprocessing-interface.a yql/essentials/core/url_lister/interface/libcore-url_lister-interface.a yql/essentials/ast/libyql-essentials-ast.a yql/essentials/public/udf/arrow/libpublic-udf-arrow.a yql/essentials/core/cbo/libessentials-core-cbo.a library/cpp/disjoint_sets/liblibrary-cpp-disjoint_sets.a yql/essentials/parser/pg_wrapper/interface/libparser-pg_wrapper-interface.a yql/essentials/public/udf/tz/libpublic-udf-tz.a contrib/libs/simdjson/libcontrib-libs-simdjson.a yql/essentials/types/binary_json/libessentials-types-binary_json.a yql/essentials/types/dynumber/libessentials-types-dynumber.a yql/essentials/types/uuid/libessentials-types-uuid.a yql/essentials/minikql/libyql-essentials-minikql.a library/cpp/dwarf_backtrace/liblibrary-cpp-dwarf_backtrace.a ydb/core/base/libydb-core-base.a library/cpp/getopt/small/libcpp-getopt-small.a ydb/library/global_plugins/libydb-library-global_plugins.a ydb/core/viewer/protos/libcore-viewer-protos.a ydb/core/viewer/json/libcore-viewer-json.a ydb/core/driver_lib/version/libversion.a library/cpp/protobuf/json/proto/libprotobuf-json-proto.a library/cpp/protobuf/json/libcpp-protobuf-json.a library/cpp/string_utils/url/libcpp-string_utils-url.a library/cpp/http/simple/libcpp-http-simple.a contrib/libs/googleapis-common-protos/libcontrib-libs-googleapis-common-protos.a ydb/public/api/client/yc_public/common/libclient-yc_public-common.a ydb/public/api/client/yc_public/iam/libclient-yc_public-iam.a ydb/public/sdk/cpp/src/library/jwt/libsrc-library-jwt.a ydb/public/api/grpc/libapi-grpc.a ydb/public/sdk/cpp/src/library/grpc/client/libsdk-library-grpc-client-v3.a ydb/public/sdk/cpp/src/library/string_utils/helpers/liblibrary-string_utils-helpers.a ydb/public/sdk/cpp/src/library/issue/libsrc-library-issue.a ydb/public/sdk/cpp/src/client/impl/ydb_internal/plain_status/libimpl-ydb_internal-plain_status.a ydb/public/sdk/cpp/src/client/types/libsrc-client-types.a ydb/public/sdk/cpp/src/client/types/exceptions/libclient-types-exceptions.a ydb/public/sdk/cpp/src/client/types/fatal_error_handlers/libclient-types-fatal_error_handlers.a ydb/public/sdk/cpp/src/client/types/status/libclient-types-status.a ydb/public/sdk/cpp/src/client/types/credentials/libclient-types-credentials.a ydb/public/sdk/cpp/src/client/iam/libsrc-client-iam.a ydb/library/security/libydb-library-security.a ydb/library/protobuf_printer/libydb-library-protobuf_printer.a ydb/library/grpc/server/liblibrary-grpc-server.a ydb/public/sdk/cpp/src/client/impl/ydb_internal/value_helpers/libimpl-ydb_internal-value_helpers.a ydb/public/sdk/cpp/src/library/decimal/libsrc-library-decimal.a ydb/public/sdk/cpp/src/library/uuid/libsrc-library-uuid.a ydb/public/sdk/cpp/src/client/value/libsrc-client-value.a ydb/public/sdk/cpp/src/client/params/libsrc-client-params.a ydb/core/graph/service/libcore-graph-service.a 
ydb/core/sys_view/service/libcore-sys_view-service.a ydb/core/grpc_services/counters/libcore-grpc_services-counters.a ydb/core/grpc_streaming/libydb-core-grpc_streaming.a ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.a library/cpp/dns/liblibrary-cpp-dns.a ydb/library/actors/http/liblibrary-actors-http.a ydb/public/sdk/cpp/adapters/issue/libcpp-adapters-issue.a ydb/core/mon/libydb-core-mon.a ydb/core/blobstorage/base/libcore-blobstorage-base.a contrib/libs/t1ha/libcontrib-libs-t1ha.a ydb/core/blobstorage/crypto/chacha_512/libblobstorage-crypto-chacha_512.a ydb/core/blobstorage/crypto/libcore-blobstorage-crypto.a ydb/core/blobstorage/vdisk/ingress/libblobstorage-vdisk-ingress.a ydb/core/blobstorage/groupinfo/libcore-blobstorage-groupinfo.a ydb/library/actors/interconnect/mock/libactors-interconnect-mock.a library/cpp/html/escape/libcpp-html-escape.a library/cpp/int128/liblibrary-cpp-int128.a library/cpp/ipv6_address/liblibrary-cpp-ipv6_address.a library/cpp/ipmath/liblibrary-cpp-ipmath.a contrib/libs/nghttp2/libcontrib-libs-nghttp2.a contrib/libs/nghttp3/libcontrib-libs-nghttp3.a contrib/libs/ngtcp2/libcontrib-libs-ngtcp2.a contrib/libs/curl/libcontrib-libs-curl.a contrib/restricted/aws/aws-c-common/librestricted-aws-aws-c-common.a contrib/restricted/aws/aws-c-cal/librestricted-aws-aws-c-cal.a contrib/restricted/aws/aws-c-compression/librestricted-aws-aws-c-compression.a contrib/restricted/aws/s2n/librestricted-aws-s2n.a contrib/restricted/aws/aws-c-io/librestricted-aws-aws-c-io.a contrib/restricted/aws/aws-c-http/librestricted-aws-aws-c-http.a contrib/restricted/aws/aws-c-sdkutils/librestricted-aws-aws-c-sdkutils.a contrib/restricted/aws/aws-c-auth/librestricted-aws-aws-c-auth.a contrib/restricted/aws/aws-checksums/librestricted-aws-aws-checksums.a contrib/restricted/aws/aws-c-event-stream/librestricted-aws-aws-c-event-stream.a contrib/restricted/aws/aws-c-mqtt/librestricted-aws-aws-c-mqtt.a contrib/restricted/aws/aws-c-s3/librestricted-aws-aws-c-s3.a contrib/restricted/aws/aws-crt-cpp/librestricted-aws-aws-crt-cpp.a contrib/libs/aws-sdk-cpp/aws-cpp-sdk-core/liblibs-aws-sdk-cpp-aws-cpp-sdk-core.a ydb/core/util/libydb-core-util.a ydb/core/actorlib_impl/libydb-core-actorlib_impl.a library/cpp/containers/bitseq/libcpp-containers-bitseq.a ydb/public/lib/scheme_types/libpublic-lib-scheme_types.a ydb/core/scheme_types/libydb-core-scheme_types.a ydb/core/scheme/libydb-core-scheme.a ydb/core/graph/shard/protos/libgraph-shard-protos.a library/cpp/lfalloc/dbg_info/libcpp-lfalloc-dbg_info.a library/cpp/cache/liblibrary-cpp-cache.a library/cpp/lfalloc/alloc_profiler/libcpp-lfalloc-alloc_profiler.a ydb/core/control/libydb-core-control.a ydb/core/mon_alloc/libydb-core-mon_alloc.a ydb/core/tracing/libydb-core-tracing.a ydb/public/sdk/cpp/src/library/persqueue/topic_parser_public/libsdk-library-persqueue-topic_parser_public-v3.a ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a ydb/core/tablet/libydb-core-tablet.a yql/essentials/minikql/arrow/libessentials-minikql-arrow.a yql/essentials/minikql/computation/libessentials-minikql-computation.a yql/essentials/core/file_storage/defs/libcore-file_storage-defs.a yql/essentials/core/file_storage/download/libcore-file_storage-download.a yql/essentials/core/file_storage/http_download/proto/libfile_storage-http_download-proto.a yql/essentials/core/file_storage/http_download/libcore-file_storage-http_download.a yql/essentials/core/file_storage/libessentials-core-file_storage.a 
yql/essentials/minikql/jsonpath/rewrapper/proto/libjsonpath-rewrapper-proto.a yql/essentials/minikql/jsonpath/rewrapper/libminikql-jsonpath-rewrapper.a yql/essentials/parser/proto_ast/libessentials-parser-proto_ast.a contrib/libs/antlr3_cpp_runtime/libcontrib-libs-antlr3_cpp_runtime.a yql/essentials/parser/proto_ast/antlr3/libparser-proto_ast-antlr3.a yql/essentials/parser/proto_ast/gen/jsonpath/libproto_ast-gen-jsonpath.a yql/essentials/minikql/jsonpath/parser/libminikql-jsonpath-parser.a yql/essentials/core/minsketch/libessentials-core-minsketch.a library/cpp/deprecated/kmp/libcpp-deprecated-kmp.a library/cpp/deprecated/split/libcpp-deprecated-split.a yql/essentials/sql/settings/libessentials-sql-settings.a yql/essentials/parser/lexer_common/libessentials-parser-lexer_common.a yql/essentials/sql/libyql-essentials-sql.a yql/essentials/core/expr_nodes_gen/libessentials-core-expr_nodes_gen.a yql/essentials/core/expr_nodes/libessentials-core-expr_nodes.a yql/essentials/core/libyql-essentials-core.a yql/essentials/providers/common/schema/parser/libcommon-schema-parser.a yql/essentials/providers/common/schema/expr/libcommon-schema-expr.a yql/essentials/providers/common/mkql/libproviders-common-mkql.a yql/essentials/public/result_format/libessentials-public-result_format.a yql/essentials/providers/common/codec/libproviders-common-codec.a ydb/library/mkql_proto/libydb-library-mkql_proto.a ydb/core/engine/libydb-core-engine.a ydb/core/docapi/libydb-core-docapi.a ydb/public/api/grpc/draft/libapi-grpc-draft.a ydb/public/sdk/cpp/src/library/operation_id/protos/liblibrary-operation_id-protos.a ydb/library/yql/dq/actors/libyql-dq-actors.a ydb/core/kqp/common/simple/libkqp-common-simple.a yql/essentials/providers/common/provider/libproviders-common-provider.a yql/essentials/providers/result/expr_nodes/libproviders-result-expr_nodes.a ydb/core/kqp/expr_nodes/libcore-kqp-expr_nodes.a ydb/core/kqp/query_data/libcore-kqp-query_data.a yql/essentials/ast/serialize/libessentials-ast-serialize.a yql/essentials/core/common_opt/libessentials-core-common_opt.a yql/essentials/core/type_ann/libessentials-core-type_ann.a yql/essentials/core/peephole_opt/libessentials-core-peephole_opt.a yql/essentials/core/services/libessentials-core-services.a library/cpp/streams/zstd/libcpp-streams-zstd.a ydb/public/sdk/cpp/include/ydb-cpp-sdk/client/topic/libydb-cpp-sdk-client-topic.a ydb/public/sdk/cpp/src/client/topic/codecs/libclient-topic-codecs.a ydb/public/sdk/cpp/src/client/impl/ydb_endpoints/libclient-impl-ydb_endpoints.a ydb/public/sdk/cpp/src/client/impl/ydb_internal/logger/libimpl-ydb_internal-logger.a ydb/public/sdk/cpp/src/client/impl/ydb_internal/db_driver_state/libimpl-ydb_internal-db_driver_state.a ydb/public/sdk/cpp/src/client/impl/ydb_internal/thread_pool/libimpl-ydb_internal-thread_pool.a ydb/public/sdk/cpp/src/client/impl/ydb_stats/libclient-impl-ydb_stats.a ydb/public/sdk/cpp/src/client/impl/ydb_internal/grpc_connections/libimpl-ydb_internal-grpc_connections.a ydb/public/sdk/cpp/src/client/common_client/impl/libclient-common_client-impl.a ydb/public/sdk/cpp/src/client/topic/common/libclient-topic-common.a ydb/public/sdk/cpp/src/library/persqueue/obfuscate/libsdk-library-persqueue-obfuscate-v3.a ydb/public/sdk/cpp/src/client/impl/ydb_internal/make_request/libimpl-ydb_internal-make_request.a ydb/public/sdk/cpp/src/client/impl/ydb_internal/common/libimpl-ydb_internal-common.a ydb/public/sdk/cpp/src/client/common_client/libsrc-client-common_client.a ydb/public/sdk/cpp/src/client/driver/libsrc-client-driver.a 
ydb/public/sdk/cpp/src/client/proto/libsrc-client-proto.a ydb/public/sdk/cpp/src/client/topic/impl/libclient-topic-impl.a ydb/public/sdk/cpp/src/library/operation_id/libsrc-library-operation_id.a ydb/public/sdk/cpp/src/client/impl/ydb_internal/kqp_session_common/libimpl-ydb_internal-kqp_session_common.a ydb/public/sdk/cpp/src/client/impl/ydb_internal/retry/libimpl-ydb_internal-retry.a ydb/public/sdk/cpp/src/client/result/libsrc-client-result.a ydb/public/sdk/cpp/src/client/scheme/libsrc-client-scheme.a ydb/public/sdk/cpp/src/client/types/operation/libclient-types-operation.a ydb/public/sdk/cpp/src/client/impl/ydb_internal/session_pool/libimpl-ydb_internal-session_pool.a ydb/public/sdk/cpp/src/client/query/impl/libclient-query-impl.a ydb/public/sdk/cpp/src/client/query/libsrc-client-query.a ydb/public/sdk/cpp/src/client/table/query_stats/libclient-table-query_stats.a ydb/public/sdk/cpp/src/client/table/impl/libclient-table-impl.a ydb/public/sdk/cpp/src/client/table/libsrc-client-table.a ydb/public/sdk/cpp/src/client/topic/libsrc-client-topic.a library/cpp/string_utils/parse_size/libcpp-string_utils-parse_size.a yql/essentials/providers/common/config/libproviders-common-config.a yql/essentials/providers/common/gateway/libproviders-common-gateway.a yql/essentials/providers/pg/expr_nodes/libproviders-pg-expr_nodes.a yql/essentials/providers/result/provider/libproviders-result-provider.a yql/essentials/parser/proto_ast/gen/v1_proto_split/libproto_ast-gen-v1_proto_split.a yql/essentials/parser/proto_ast/gen/v1/libproto_ast-gen-v1.a yql/essentials/parser/proto_ast/gen/v1_ansi/libproto_ast-gen-v1_ansi.a contrib/libs/antlr4_cpp_runtime/libcontrib-libs-antlr4_cpp_runtime.a yql/essentials/parser/proto_ast/antlr4/libparser-proto_ast-antlr4.a yql/essentials/parser/proto_ast/gen/v1_antlr4/libproto_ast-gen-v1_antlr4.a yql/essentials/parser/proto_ast/gen/v1_ansi_antlr4/libproto_ast-gen-v1_ansi_antlr4.a yql/essentials/sql/v1/lexer/antlr3/libv1-lexer-antlr3.a yql/essentials/sql/v1/lexer/antlr3_ansi/libv1-lexer-antlr3_ansi.a yql/essentials/sql/v1/lexer/antlr4/libv1-lexer-antlr4.a yql/essentials/sql/v1/lexer/antlr4_ansi/libv1-lexer-antlr4_ansi.a yql/essentials/sql/v1/lexer/libsql-v1-lexer.a yql/essentials/sql/v1/proto_parser/antlr3/libv1-proto_parser-antlr3.a yql/essentials/sql/v1/proto_parser/antlr3_ansi/libv1-proto_parser-antlr3_ansi.a yql/essentials/sql/v1/proto_parser/antlr4/libv1-proto_parser-antlr4.a yql/essentials/sql/v1/proto_parser/antlr4_ansi/libv1-proto_parser-antlr4_ansi.a yql/essentials/sql/v1/proto_parser/libsql-v1-proto_parser.a yql/essentials/sql/v1/libessentials-sql-v1.a ydb/core/kqp/provider/libcore-kqp-provider.a ydb/core/client/minikql_compile/libcore-client-minikql_compile.a ydb/core/formats/libydb-core-formats.a ydb/core/tablet_flat/protos/libcore-tablet_flat-protos.a ydb/core/tablet_flat/libydb-core-tablet_flat.a ydb/core/engine/minikql/libcore-engine-minikql.a ydb/core/kqp/common/batch/libkqp-common-batch.a ydb/core/kqp/common/compilation/libkqp-common-compilation.a ydb/core/grpc_services/cancelation/protos/libgrpc_services-cancelation-protos.a ydb/core/grpc_services/cancelation/libcore-grpc_services-cancelation.a ydb/core/kqp/common/shutdown/libkqp-common-shutdown.a ydb/core/resource_pools/libydb-core-resource_pools.a ydb/core/kqp/common/events/libkqp-common-events.a ydb/core/tx/long_tx_service/public/libtx-long_tx_service-public.a ydb/library/formats/arrow/validation/liblibrary-formats-arrow-validation.a ydb/library/formats/arrow/switch/liblibrary-formats-arrow-switch.a 
ydb/core/formats/arrow/switch/libformats-arrow-switch.a ydb/library/conclusion/libydb-library-conclusion.a ydb/library/formats/arrow/simple_builder/liblibrary-formats-arrow-simple_builder.a ydb/library/formats/arrow/transformer/liblibrary-formats-arrow-transformer.a ydb/library/formats/arrow/splitter/liblibrary-formats-arrow-splitter.a ydb/library/formats/arrow/modifier/liblibrary-formats-arrow-modifier.a ydb/library/formats/arrow/scalar/liblibrary-formats-arrow-scalar.a ydb/library/formats/arrow/hash/liblibrary-formats-arrow-hash.a ydb/library/arrow_kernels/libydb-library-arrow_kernels.a yql/essentials/core/arrow_kernels/request/libcore-arrow_kernels-request.a contrib/restricted/cityhash-1.0.2/libcontrib-restricted-cityhash-1.0.2.a ydb/library/arrow_clickhouse/Common/liblibrary-arrow_clickhouse-Common.a ydb/library/arrow_clickhouse/Columns/liblibrary-arrow_clickhouse-Columns.a ydb/library/arrow_clickhouse/DataStreams/liblibrary-arrow_clickhouse-DataStreams.a ydb/library/arrow_clickhouse/libydb-library-arrow_clickhouse.a ydb/library/formats/arrow/liblibrary-formats-arrow.a ydb/library/accessor/libydb-library-accessor.a ydb/services/metadata/abstract/libservices-metadata-abstract.a ydb/core/formats/arrow/serializer/libformats-arrow-serializer.a ydb/core/formats/arrow/splitter/libformats-arrow-splitter.a ydb/core/formats/arrow/common/libformats-arrow-common.a ydb/core/formats/arrow/reader/libformats-arrow-reader.a ydb/core/formats/arrow/hash/libformats-arrow-hash.a ydb/core/formats/arrow/dictionary/libformats-arrow-dictionary.a ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a ydb/core/tx/schemeshard/olap/column_families/libschemeshard-olap-column_families.a ydb/core/tx/columnshard/engines/scheme/defaults/common/libscheme-defaults-common.a ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a ydb/services/bg_tasks/protos/libservices-bg_tasks-protos.a ydb/services/bg_tasks/abstract/libservices-bg_tasks-abstract.a ydb/core/formats/arrow/accessor/common/liblibrary-formats-arrow-accessor-common.a ydb/core/formats/arrow/accessor/abstract/libarrow-accessor-abstract.a ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.a ydb/core/formats/arrow/transformer/libformats-arrow-transformer.a ydb/core/formats/arrow/save_load/libformats-arrow-save_load.a ydb/core/formats/arrow/accessor/composite_serial/libarrow-accessor-composite_serial.a ydb/core/formats/arrow/accessor/composite/liblibrary-formats-arrow-accessor-composite.a ydb/core/formats/arrow/accessor/dictionary/libarrow-accessor-dictionary.a ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.a ydb/library/signals/libydb-library-signals.a ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.a ydb/core/formats/arrow/rows/libformats-arrow-rows.a ydb/core/formats/arrow/libcore-formats-arrow.a yql/essentials/core/arrow_kernels/registry/libcore-arrow_kernels-registry.a contrib/libs/llvm16/lib/Demangle/libllvm16-lib-Demangle.a contrib/libs/llvm16/lib/Support/libllvm16-lib-Support.a contrib/libs/llvm16/lib/TargetParser/libllvm16-lib-TargetParser.a contrib/libs/llvm16/lib/BinaryFormat/libllvm16-lib-BinaryFormat.a contrib/libs/llvm16/lib/Bitstream/Reader/liblib-Bitstream-Reader.a contrib/libs/llvm16/lib/Remarks/libllvm16-lib-Remarks.a contrib/libs/llvm16/lib/IR/libllvm16-lib-IR.a contrib/libs/llvm16/lib/ExecutionEngine/Orc/Shared/libExecutionEngine-Orc-Shared.a contrib/libs/llvm16/lib/ExecutionEngine/Orc/TargetProcess/libExecutionEngine-Orc-TargetProcess.a 
contrib/libs/llvm16/lib/DebugInfo/CodeView/liblib-DebugInfo-CodeView.a contrib/libs/llvm16/lib/MC/libllvm16-lib-MC.a contrib/libs/llvm16/lib/Bitcode/Reader/liblib-Bitcode-Reader.a contrib/libs/llvm16/lib/AsmParser/libllvm16-lib-AsmParser.a contrib/libs/llvm16/lib/IRReader/libllvm16-lib-IRReader.a contrib/libs/llvm16/lib/MC/MCParser/liblib-MC-MCParser.a contrib/libs/llvm16/lib/TextAPI/libllvm16-lib-TextAPI.a contrib/libs/llvm16/lib/Object/libllvm16-lib-Object.a contrib/libs/llvm16/lib/ExecutionEngine/RuntimeDyld/liblib-ExecutionEngine-RuntimeDyld.a contrib/libs/llvm16/lib/DebugInfo/DWARF/liblib-DebugInfo-DWARF.a contrib/libs/llvm16/lib/DebugInfo/MSF/liblib-DebugInfo-MSF.a contrib/libs/llvm16/lib/DebugInfo/PDB/liblib-DebugInfo-PDB.a contrib/libs/llvm16/lib/DebugInfo/Symbolize/liblib-DebugInfo-Symbolize.a contrib/libs/llvm16/lib/ProfileData/libllvm16-lib-ProfileData.a contrib/libs/llvm16/lib/Analysis/libllvm16-lib-Analysis.a contrib/libs/llvm16/lib/Target/libllvm16-lib-Target.a contrib/libs/llvm16/lib/ExecutionEngine/libllvm16-lib-ExecutionEngine.a contrib/libs/llvm16/lib/ExecutionEngine/MCJIT/liblib-ExecutionEngine-MCJIT.a contrib/libs/llvm16/lib/Transforms/Utils/liblib-Transforms-Utils.a contrib/libs/llvm16/lib/Linker/libllvm16-lib-Linker.a contrib/libs/llvm16/lib/Bitcode/Writer/liblib-Bitcode-Writer.a contrib/libs/llvm16/lib/Transforms/ObjCARC/liblib-Transforms-ObjCARC.a contrib/libs/llvm16/lib/Transforms/AggressiveInstCombine/liblib-Transforms-AggressiveInstCombine.a contrib/libs/llvm16/lib/Transforms/InstCombine/liblib-Transforms-InstCombine.a contrib/libs/llvm16/lib/Transforms/Scalar/liblib-Transforms-Scalar.a contrib/libs/llvm16/lib/CodeGen/libllvm16-lib-CodeGen.a contrib/libs/llvm16/lib/IRPrinter/libllvm16-lib-IRPrinter.a contrib/libs/llvm16/lib/Frontend/OpenMP/liblib-Frontend-OpenMP.a contrib/libs/llvm16/lib/Transforms/Instrumentation/liblib-Transforms-Instrumentation.a contrib/libs/llvm16/lib/Transforms/Vectorize/liblib-Transforms-Vectorize.a contrib/libs/llvm16/lib/Transforms/IPO/liblib-Transforms-IPO.a contrib/libs/llvm16/lib/Transforms/Coroutines/liblib-Transforms-Coroutines.a contrib/libs/llvm16/lib/Passes/libllvm16-lib-Passes.a contrib/libs/llvm16/lib/CodeGen/AsmPrinter/liblib-CodeGen-AsmPrinter.a contrib/libs/llvm16/lib/CodeGen/SelectionDAG/liblib-CodeGen-SelectionDAG.a contrib/libs/llvm16/lib/CodeGen/GlobalISel/liblib-CodeGen-GlobalISel.a contrib/libs/llvm16/lib/MC/MCDisassembler/liblib-MC-MCDisassembler.a contrib/libs/llvm16/lib/Target/X86/TargetInfo/libTarget-X86-TargetInfo.a contrib/libs/llvm16/lib/Target/X86/MCTargetDesc/libTarget-X86-MCTargetDesc.a contrib/libs/llvm16/lib/Transforms/CFGuard/liblib-Transforms-CFGuard.a contrib/libs/llvm16/lib/Target/X86/liblib-Target-X86.a contrib/libs/llvm16/lib/Target/X86/AsmParser/libTarget-X86-AsmParser.a contrib/libs/llvm16/lib/Target/X86/Disassembler/libTarget-X86-Disassembler.a contrib/libs/llvm16/lib/ExecutionEngine/PerfJITEvents/liblib-ExecutionEngine-PerfJITEvents.a yql/essentials/minikql/codegen/llvm16/libminikql-codegen-llvm16.a yql/essentials/minikql/computation/llvm16/libminikql-computation-llvm16.a yql/essentials/minikql/invoke_builtins/llvm16/libminikql-invoke_builtins-llvm16.a yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a ydb/core/formats/arrow/program/libformats-arrow-program.a ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a 
ydb/core/tx/schemeshard/common/libtx-schemeshard-common.a ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a ydb/core/tx/columnshard/blobs_action/protos/libcolumnshard-blobs_action-protos.a ydb/core/tx/columnshard/data_sharing/protos/libcolumnshard-data_sharing-protos.a ydb/core/tx/columnshard/transactions/protos/libcolumnshard-transactions-protos.a ydb/core/tx/columnshard/export/protos/libcolumnshard-export-protos.a ydb/core/tx/columnshard/common/libtx-columnshard-common.a ydb/core/tx/sharding/libcore-tx-sharding.a ydb/library/yql/dq/common/libyql-dq-common.a yql/essentials/core/dq_integration/libessentials-core-dq_integration.a ydb/core/kqp/common/libcore-kqp-common.a ydb/core/kqp/common/buffer/libkqp-common-buffer.a ydb/core/ydb_convert/libydb-core-ydb_convert.a ydb/library/yql/dq/type_ann/libyql-dq-type_ann.a yql/essentials/providers/common/comp_nodes/libproviders-common-comp_nodes.a yql/essentials/providers/common/schema/mkql/libcommon-schema-mkql.a ydb/library/yql/dq/runtime/libyql-dq-runtime.a ydb/library/yql/dq/actors/spilling/libdq-actors-spilling.a ydb/core/kqp/runtime/libcore-kqp-runtime.a contrib/libs/pcre/libcontrib-libs-pcre.a contrib/libs/pcre/pcre16/liblibs-pcre-pcre16.a contrib/libs/pcre/pcre32/liblibs-pcre-pcre32.a library/cpp/regex/pcre/libcpp-regex-pcre.a ydb/core/blobstorage/lwtrace_probes/libcore-blobstorage-lwtrace_probes.a ydb/library/schlab/probes/liblibrary-schlab-probes.a ydb/library/schlab/schine/liblibrary-schlab-schine.a ydb/library/schlab/libydb-library-schlab.a ydb/library/schlab/protos/liblibrary-schlab-protos.a ydb/library/schlab/schoot/liblibrary-schlab-schoot.a ydb/library/schlab/schemu/liblibrary-schlab-schemu.a ydb/library/schlab/mon/liblibrary-schlab-mon.a ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a ydb/core/blobstorage/vdisk/protos/libblobstorage-vdisk-protos.a ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a ydb/core/blobstorage/common/libcore-blobstorage-common.a ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a library/cpp/openssl/big_integer/libcpp-openssl-big_integer.a library/cpp/openssl/crypto/libcpp-openssl-crypto.a ydb/core/blob_depot/libydb-core-blob_depot.a ydb/core/blob_depot/agent/libcore-blob_depot-agent.a ydb/core/cms/console/util/libcms-console-util.a contrib/libs/libfyaml/libcontrib-libs-libfyaml.a ydb/library/fyamlcpp/libydb-library-fyamlcpp.a ydb/library/yaml_config/protos/libyaml-config-protos.a contrib/libs/yaml-cpp/libcontrib-libs-yaml-cpp.a ydb/library/yaml_config/public/liblibrary-yaml_config-public.a ydb/library/yaml_json/libydb-library-yaml_json.a ydb/library/yaml_config/libydb-library-yaml_config.a ydb/apps/version/libversion_definition.a ydb/core/blobstorage/pdisk/mock/libblobstorage-pdisk-mock.a ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a 
ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a library/cpp/deprecated/accessors/libcpp-deprecated-accessors.a library/cpp/bit_io/liblibrary-cpp-bit_io.a library/cpp/packers/liblibrary-cpp-packers.a library/cpp/containers/compact_vector/libcpp-containers-compact_vector.a library/cpp/on_disk/chunks/libcpp-on_disk-chunks.a library/cpp/containers/comptrie/libcpp-containers-comptrie.a library/cpp/codecs/greedy_dict/libcpp-codecs-greedy_dict.a library/cpp/compproto/liblibrary-cpp-compproto.a library/cpp/comptable/liblibrary-cpp-comptable.a library/cpp/codecs/liblibrary-cpp-codecs.a library/cpp/containers/intrusive_avl_tree/libcpp-containers-intrusive_avl_tree.a ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a ydb/core/blobstorage/vdisk/libcore-blobstorage-vdisk.a ydb/core/persqueue/partition_key_range/libcore-persqueue-partition_key_range.a ydb/core/sys_view/common/libcore-sys_view-common.a ydb/core/keyvalue/protos/libcore-keyvalue-protos.a ydb/core/persqueue/config/libcore-persqueue-config.a ydb/core/tx/libydb-core-tx.a library/cpp/messagebus/actor/libmessagebus_actor.a library/cpp/messagebus/config/libcpp-messagebus-config.a library/cpp/messagebus/scheduler/libcpp-messagebus-scheduler.a library/cpp/string_utils/indent_text/libcpp-string_utils-indent_text.a library/cpp/messagebus/liblibrary-cpp-messagebus.a library/cpp/messagebus/protobuf/libmessagebus_protobuf.a ydb/public/lib/base/libpublic-lib-base.a ydb/core/keyvalue/libydb-core-keyvalue.a ydb/core/persqueue/events/libcore-persqueue-events.a ydb/public/lib/deprecated/client/liblib-deprecated-client.a ydb/public/lib/value/libpublic-lib-value.a ydb/public/lib/deprecated/kicli/liblib-deprecated-kicli.a ydb/core/persqueue/writer/libcore-persqueue-writer.a ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a ydb/core/tx/scheme_board/libcore-tx-scheme_board.a ydb/core/util/actorsys_test/libcore-util-actorsys_test.a ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a ydb/core/blobstorage/other/libcore-blobstorage-other.a library/cpp/messagebus/oldmodule/libcpp-messagebus-oldmodule.a library/cpp/monlib/deprecated/json/libmonlib-deprecated-json.a library/cpp/messagebus/www/libcpp-messagebus-www.a library/cpp/monlib/messagebus/libcpp-monlib-messagebus.a ydb/core/client/metadata/libcore-client-metadata.a ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a ydb/core/audit/libydb-core-audit.a ydb/core/discovery/libydb-core-discovery.a yql/essentials/providers/common/metrics/protos/libcommon-metrics-protos.a ydb/library/yql/providers/dq/api/protos/libdq-api-protos.a ydb/core/fq/libs/graph_params/proto/liblibs-graph_params-proto.a ydb/core/fq/libs/protos/libfq-libs-protos.a ydb/library/yql/providers/pq/proto/libproviders-pq-proto.a ydb/core/fq/libs/row_dispatcher/protos/liblibs-row_dispatcher-protos.a 
yql/essentials/core/extract_predicate/libessentials-core-extract_predicate.a yql/essentials/core/qplayer/storage/interface/libqplayer-storage-interface.a yql/essentials/core/qplayer/udf_resolver/libcore-qplayer-udf_resolver.a yql/essentials/providers/common/udf_resolve/libproviders-common-udf_resolve.a yql/essentials/providers/common/arrow_resolve/libproviders-common-arrow_resolve.a yql/essentials/providers/common/activation/libproviders-common-activation.a yql/essentials/providers/config/libessentials-providers-config.a yql/essentials/core/facade/libessentials-core-facade.a ydb/library/yql/providers/common/db_id_async_resolver/libproviders-common-db_id_async_resolver.a library/cpp/threading/task_scheduler/libcpp-threading-task_scheduler.a ydb/public/lib/yson_value/libpublic-lib-yson_value.a yql/essentials/utils/failure_injector/libessentials-utils-failure_injector.a yql/essentials/core/dq_integration/transform/libcore-dq_integration-transform.a ydb/library/yql/dq/transform/libyql-dq-transform.a ydb/library/yql/dq/tasks/libyql-dq-tasks.a yql/essentials/providers/common/metrics/libproviders-common-metrics.a yql/essentials/providers/common/transform/libproviders-common-transform.a ydb/library/yql/providers/dq/api/grpc/libdq-api-grpc.a ydb/library/yql/providers/dq/common/libproviders-dq-common.a ydb/library/yql/providers/dq/config/libproviders-dq-config.a ydb/library/yql/dq/opt/libyql-dq-opt.a ydb/library/yql/providers/dq/opt/libproviders-dq-opt.a ydb/library/yql/providers/dq/mkql/libproviders-dq-mkql.a ydb/library/yql/providers/dq/planner/libproviders-dq-planner.a yql/essentials/core/user_data/libessentials-core-user_data.a yql/essentials/core/services/mounts/libcore-services-mounts.a library/cpp/threading/atomic/libcpp-threading-atomic.a yql/essentials/providers/common/structured_token/libproviders-common-structured_token.a ydb/library/yql/providers/common/token_accessor/grpc/libcommon-token_accessor-grpc.a ydb/library/yql/providers/common/token_accessor/client/libcommon-token_accessor-client.a ydb/library/yql/providers/solomon/proto/libproviders-solomon-proto.a ydb/library/yql/utils/actors/libyql-utils-actors.a ydb/library/yql/utils/actor_log/libyql-utils-actor_log.a ydb/library/yql/dq/actors/common/libdq-actors-common.a ydb/core/quoter/public/libcore-quoter-public.a ydb/library/yql/dq/actors/compute/libdq-actors-compute.a ydb/library/yql/providers/dq/actors/events/libdq-actors-events.a ydb/library/yql/providers/dq/counters/libproviders-dq-counters.a ydb/library/yql/providers/dq/interface/libproviders-dq-interface.a ydb/library/yql/providers/dq/task_runner/libproviders-dq-task_runner.a ydb/library/yql/dq/actors/task_runner/libdq-actors-task_runner.a ydb/library/yql/providers/dq/runtime/libproviders-dq-runtime.a ydb/library/yql/providers/dq/task_runner_actor/libproviders-dq-task_runner_actor.a ydb/library/yql/providers/dq/worker_manager/interface/libdq-worker_manager-interface.a ydb/library/yql/providers/dq/worker_manager/libproviders-dq-worker_manager.a ydb/library/yql/providers/dq/actors/libproviders-dq-actors.a ydb/library/yql/providers/dq/provider/libproviders-dq-provider.a ydb/core/fq/libs/events/libfq-libs-events.a ydb/core/fq/libs/control_plane_storage/proto/liblibs-control_plane_storage-proto.a ydb/core/fq/libs/quota_manager/proto/liblibs-quota_manager-proto.a ydb/core/fq/libs/quota_manager/events/liblibs-quota_manager-events.a ydb/core/fq/libs/control_plane_storage/events/liblibs-control_plane_storage-events.a 
ydb/core/fq/libs/control_plane_proxy/events/liblibs-control_plane_proxy-events.a ydb/core/health_check/libydb-core-health_check.a library/cpp/json/yson/libcpp-json-yson.a ydb/core/io_formats/cell_maker/libcore-io_formats-cell_maker.a ydb/core/io_formats/ydb_dump/libcore-io_formats-ydb_dump.a ydb/core/metering/libydb-core-metering.a ydb/core/kesus/tablet/libcore-kesus-tablet.a ydb/core/fq/libs/db_id_async_resolver_impl/libfq-libs-db_id_async_resolver_impl.a ydb/core/fq/libs/grpc/libfq-libs-grpc.a ydb/library/db_pool/protos/liblibrary-db_pool-protos.a library/cpp/retry/protos/libcpp-retry-protos.a library/cpp/retry/liblibrary-cpp-retry.a ydb/library/yql/providers/common/http_gateway/libproviders-common-http_gateway.a ydb/library/yql/providers/generic/connector/api/service/libconnector-api-service.a ydb/library/yql/providers/generic/connector/libcpp/libgeneric-connector-libcpp.a library/cpp/json/easy_parse/libcpp-json-easy_parse.a library/cpp/protobuf/interop/libcpp-protobuf-interop.a ydb/library/yql/providers/solomon/solomon_accessor/grpc/libsolomon-solomon_accessor-grpc.a ydb/library/yql/providers/solomon/solomon_accessor/client/libsolomon-solomon_accessor-client.a ydb/library/yql/providers/solomon/events/libproviders-solomon-events.a ydb/library/yql/providers/solomon/actors/libproviders-solomon-actors.a ydb/library/yql/providers/solomon/common/libproviders-solomon-common.a ydb/library/yql/providers/solomon/expr_nodes/libproviders-solomon-expr_nodes.a ydb/library/yql/providers/solomon/provider/libproviders-solomon-provider.a ydb/library/yql/providers/solomon/gateway/libproviders-solomon-gateway.a library/cpp/type_info/liblibrary-cpp-type_info.a library/cpp/yt/logging/libcpp-yt-logging.a yt/cpp/mapreduce/interface/logging/libmapreduce-interface-logging.a yt/yt_proto/yt/formats/libyt_proto-yt-formats.a yt/yt/library/tvm/libyt-library-tvm.a contrib/libs/farmhash/arch/sse41/libfarmhash-arch-sse41.a contrib/libs/farmhash/arch/sse42/libfarmhash-arch-sse42.a contrib/libs/farmhash/arch/sse42_aesni/libfarmhash-arch-sse42_aesni.a contrib/libs/farmhash/libcontrib-libs-farmhash.a contrib/libs/yajl/libcontrib-libs-yajl.a library/cpp/threading/skip_list/libcpp-threading-skip_list.a library/cpp/threading/thread_local/libcpp-threading-thread_local.a library/cpp/yt/cpu_clock/libcpp-yt-cpu_clock.a library/cpp/yt/threading/libcpp-yt-threading.a library/cpp/yt/global/libcpp-yt-global.a library/cpp/yt/error/libcpp-yt-error.a library/cpp/yt/logging/plain_text_formatter/libyt-logging-plain_text_formatter.a library/cpp/ytalloc/api/libcpp-ytalloc-api.a yt/yt/build/libyt-yt-build.a yt/yt/core/misc/isa_crc64/libisa-l_crc_yt_patch.a yt/yt_proto/yt/core/libyt_proto-yt-core.a library/cpp/yt/backtrace/libcpp-yt-backtrace.a yt/yt/library/profiling/libyt-library-profiling.a yt/yt/library/undumpable/libyt-library-undumpable.a yt/yt/library/ytprof/api/liblibrary-ytprof-api.a yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.a yt/yt/library/tracing/libyt-library-tracing.a library/cpp/yt/backtrace/cursors/libunwind/libbacktrace-cursors-libunwind.a yt/yt/core/libyt-yt-core.a yt/cpp/mapreduce/interface/libcpp-mapreduce-interface.a yt/cpp/mapreduce/common/libcpp-mapreduce-common.a yql/essentials/utils/threading/libessentials-utils-threading.a library/cpp/skiff/liblibrary-cpp-skiff.a yt/cpp/mapreduce/io/libcpp-mapreduce-io.a yql/essentials/providers/common/codec/arrow/libcommon-codec-arrow.a yt/yql/providers/yt/common/libproviders-yt-common.a 
yt/yql/providers/yt/lib/mkql_helpers/libyt-lib-mkql_helpers.a yql/essentials/providers/common/schema/skiff/libcommon-schema-skiff.a yt/yql/providers/yt/lib/skiff/libyt-lib-skiff.a yt/yt/library/decimal/libyt-library-decimal.a yt/yql/providers/yt/codec/libproviders-yt-codec.a yt/yql/providers/yt/expr_nodes/libproviders-yt-expr_nodes.a library/cpp/sighandler/liblibrary-cpp-sighandler.a library/cpp/threading/blocking_queue/libcpp-threading-blocking_queue.a library/cpp/threading/cron/libcpp-threading-cron.a contrib/restricted/http-parser/libcontrib-restricted-http-parser.a yt/yt/core/http/libyt-core-http.a yt/yt/core/https/libyt-core-https.a yt/cpp/mapreduce/http/libcpp-mapreduce-http.a yt/cpp/mapreduce/http_client/libcpp-mapreduce-http_client.a yt/cpp/mapreduce/client/libcpp-mapreduce-client.a yql/essentials/providers/common/dq/libproviders-common-dq.a yql/providers/stat/expr_nodes/libproviders-stat-expr_nodes.a yt/yql/providers/yt/lib/expr_traits/libyt-lib-expr_traits.a yt/yql/providers/yt/lib/graph_reorder/libyt-lib-graph_reorder.a yt/yql/providers/yt/lib/hash/libyt-lib-hash.a yt/yql/providers/yt/lib/key_filter/libyt-lib-key_filter.a yt/yql/providers/yt/lib/res_pull/libyt-lib-res_pull.a yql/essentials/providers/common/schema/libproviders-common-schema.a yt/yql/providers/yt/lib/row_spec/libyt-lib-row_spec.a yt/yql/providers/yt/lib/schema/libyt-lib-schema.a yt/yql/providers/yt/lib/yson_helpers/libyt-lib-yson_helpers.a yt/yql/providers/yt/opt/libproviders-yt-opt.a yt/yql/providers/yt/gateway/qplayer/libyt-gateway-qplayer.a yt/yql/providers/yt/proto/libproviders-yt-proto.a yt/yql/providers/ytflow/expr_nodes/libproviders-ytflow-expr_nodes.a yt/yql/providers/ytflow/integration/interface/libytflow-integration-interface.a yt/yql/providers/ytflow/integration/proto/libytflow-integration-proto.a yt/yql/providers/yt/provider/libproviders-yt-provider.a yt/yql/providers/yt/lib/url_mapper/libyt-lib-url_mapper.a yt/yql/providers/yt/gateway/lib/libyt-gateway-lib.a yt/cpp/mapreduce/library/user_job_statistics/libmapreduce-library-user_job_statistics.a yt/yt/client/query_tracker_client/libyt-client-query_tracker_client.a yt/yt/library/auth/libyt-library-auth.a yt/yt/library/re2/libyt-library-re2.a yt/yt/library/erasure/libyt-library-erasure.a yt/yt/library/numeric/libyt-library-numeric.a library/cpp/tdigest/liblibrary-cpp-tdigest.a yt/yt/library/quantile_digest/libyt-library-quantile_digest.a yt/yt_proto/yt/client/libyt_proto-yt-client.a yt/yt/client/libyt-yt-client.a yt/yql/providers/yt/lib/infer_schema/libyt-lib-infer_schema.a yt/yql/providers/yt/lib/lambda_builder/libyt-lib-lambda_builder.a yt/yql/providers/yt/job/libproviders-yt-job.a yt/yql/providers/yt/lib/init_yt_api/libyt-lib-init_yt_api.a yt/yql/providers/yt/lib/log/libyt-lib-log.a yt/yql/providers/yt/lib/config_clusters/libyt-lib-config_clusters.a yt/yql/providers/yt/gateway/native/libyt-gateway-native.a yt/yql/providers/yt/lib/yt_download/libyt-lib-yt_download.a ydb/core/kqp/federated_query/libcore-kqp-federated_query.a ydb/core/blockstore/core/libcore-blockstore-core.a library/cpp/scheme/liblibrary-cpp-scheme.a ydb/library/yql/providers/s3/credentials/libproviders-s3-credentials.a ydb/core/external_sources/object_storage/libcore-external_sources-object_storage.a contrib/libs/fmt/libcontrib-libs-fmt.a contrib/libs/expat/libcontrib-libs-expat.a contrib/libs/poco/Foundation/liblibs-poco-Foundation.a contrib/libs/poco/JSON/liblibs-poco-JSON.a contrib/libs/poco/XML/liblibs-poco-XML.a contrib/libs/poco/Util/liblibs-poco-Util.a 
contrib/libs/lzma/libcontrib-libs-lzma.a yql/essentials/public/udf/support/libpublic-udf-support.a contrib/restricted/boost/program_options/librestricted-boost-program_options.a contrib/restricted/dragonbox/libdragonbox.a contrib/libs/poco/Net/liblibs-poco-Net.a contrib/libs/poco/Crypto/liblibs-poco-Crypto.a contrib/libs/poco/NetSSL_OpenSSL/liblibs-poco-NetSSL_OpenSSL.a contrib/restricted/boost/context/impl_common/libboost-context-impl_common.a contrib/restricted/boost/context/fcontext_impl/libboost-context-fcontext_impl.a contrib/restricted/boost/coroutine/librestricted-boost-coroutine.a contrib/restricted/boost/iostreams/librestricted-boost-iostreams.a contrib/libs/apache/avro/liblibs-apache-avro.a ydb/library/yql/providers/s3/compressors/libproviders-s3-compressors.a ydb/core/external_sources/object_storage/inference/libexternal_sources-object_storage-inference.a ydb/library/yql/providers/s3/events/libproviders-s3-events.a ydb/library/yql/providers/s3/common/libproviders-s3-common.a contrib/libs/libxml/libcontrib-libs-libxml.a library/cpp/xml/init/libcpp-xml-init.a library/cpp/string_utils/ztstrbuf/libcpp-string_utils-ztstrbuf.a library/cpp/xml/document/libcpp-xml-document.a ydb/library/yql/providers/s3/object_listers/libproviders-s3-object_listers.a yql/essentials/minikql/datetime/libessentials-minikql-datetime.a ydb/library/yql/providers/s3/path_generator/libproviders-s3-path_generator.a ydb/core/external_sources/libydb-core-external_sources.a ydb/core/filestore/core/libcore-filestore-core.a ydb/core/change_exchange/libydb-core-change_exchange.a ydb/library/yql/providers/common/pushdown/libproviders-common-pushdown.a ydb/library/yql/providers/dq/provider/exec/libdq-provider-exec.a ydb/core/fq/libs/common/libfq-libs-common.a ydb/core/fq/libs/result_formatter/libfq-libs-result_formatter.a ydb/library/yql/providers/generic/expr_nodes/libproviders-generic-expr_nodes.a ydb/library/yql/providers/generic/proto/libproviders-generic-proto.a ydb/library/yql/utils/plan/libyql-utils-plan.a ydb/library/yql/providers/generic/provider/libproviders-generic-provider.a ydb/library/yql/providers/pq/cm_client/libproviders-pq-cm_client.a ydb/library/yql/providers/pq/common/libproviders-pq-common.a ydb/library/yql/providers/pq/expr_nodes/libproviders-pq-expr_nodes.a ydb/library/yql/providers/pq/provider/libproviders-pq-provider.a ydb/core/fq/libs/row_dispatcher/events/liblibs-row_dispatcher-events.a ydb/core/fq/libs/row_dispatcher/purecalc_no_pg_wrapper/liblibs-row_dispatcher-purecalc_no_pg_wrapper.a ydb/core/persqueue/purecalc/libcore-persqueue-purecalc.a ydb/core/tx/replication/common/libtx-replication-common.a ydb/public/sdk/cpp/src/client/types/credentials/login/libtypes-credentials-login.a ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a contrib/libs/aws-sdk-cpp/aws-cpp-sdk-s3/liblibs-aws-sdk-cpp-aws-cpp-sdk-s3.a ydb/core/wrappers/ut_helpers/libcore-wrappers-ut_helpers.a ydb/core/wrappers/events/libcore-wrappers-events.a ydb/core/wrappers/libydb-core-wrappers.a ydb/core/tx/replication/service/libtx-replication-service.a ydb/core/backup/impl/libcore-backup-impl.a library/cpp/containers/disjoint_interval_tree/libcpp-containers-disjoint_interval_tree.a ydb/public/sdk/cpp/src/client/persqueue_public/impl/libclient-persqueue_public-impl.a ydb/public/sdk/cpp/src/client/persqueue_public/include/libclient-persqueue_public-include.a ydb/core/persqueue/codecs/libcore-persqueue-codecs.a ydb/library/logger/libydb-library-logger.a 
ydb/library/persqueue/counter_time_keeper/liblibrary-persqueue-counter_time_keeper.a ydb/core/persqueue/libydb-core-persqueue.a ydb/library/query_actor/libydb-library-query_actor.a ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a ydb/core/backup/common/libcore-backup-common.a ydb/core/tx/locks/libcore-tx-locks.a ydb/services/lib/sharding/libservices-lib-sharding.a ydb/library/chunks_limiter/libydb-library-chunks_limiter.a ydb/core/tx/datashard/libcore-tx-datashard.a ydb/core/tx/columnshard/bg_tasks/protos/libcolumnshard-bg_tasks-protos.a ydb/core/tx/columnshard/bg_tasks/abstract/libcolumnshard-bg_tasks-abstract.a ydb/core/tx/columnshard/bg_tasks/events/libcolumnshard-bg_tasks-events.a ydb/core/tx/columnshard/bg_tasks/manager/libcolumnshard-bg_tasks-manager.a ydb/core/tx/columnshard/bg_tasks/session/libcolumnshard-bg_tasks-session.a ydb/core/tx/columnshard/bg_tasks/transactions/libcolumnshard-bg_tasks-transactions.a ydb/core/tx/schemeshard/olap/bg_tasks/adapter/libolap-bg_tasks-adapter.a ydb/core/tx/schemeshard/olap/bg_tasks/protos/libolap-bg_tasks-protos.a ydb/core/tx/schemeshard/olap/bg_tasks/events/libolap-bg_tasks-events.a ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a library/cpp/containers/ring_buffer/libcpp-containers-ring_buffer.a ydb/core/mind/hive/libcore-mind-hive.a ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a ydb/core/tx/columnshard/data_sharing/initiator/status/libdata_sharing-initiator-status.a ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.a ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a ydb/services/metadata/secret/accessor/libmetadata-secret-accessor.a ydb/core/tx/tiering/tier/libtx-tiering-tier.a ydb/core/tx/schemeshard/olap/ttl/libschemeshard-olap-ttl.a ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a ydb/public/lib/ydb_cli/dump/files/libydb_cli-dump-files.a library/cpp/archive/liblibrary-cpp-archive.a contrib/libs/lua/libcontrib-libs-lua.a library/cpp/lua/liblibrary-cpp-lua.a library/cpp/config/liblibrary-cpp-config.a library/cpp/string_utils/csv/libcpp-string_utils-csv.a ydb/public/lib/json_value/libpublic-lib-json_value.a ydb/public/sdk/cpp/src/client/draft/libsrc-client-draft.a ydb/public/sdk/cpp/src/client/types/credentials/oauth2_token_exchange/libtypes-credentials-oauth2_token_exchange.a 
ydb/library/arrow_parquet/libydb-library-arrow_parquet.a ydb/public/lib/ydb_cli/common/libcommon.a yql/essentials/sql/v1/format/libsql-v1-format.a ydb/public/lib/ydb_cli/dump/util/libydb_cli-dump-util.a ydb/core/tx/schemeshard/libcore-tx-schemeshard.a ydb/core/kqp/session_actor/libcore-kqp-session_actor.a ydb/core/sys_view/auth/libcore-sys_view-auth.a ydb/core/sys_view/nodes/libcore-sys_view-nodes.a ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a ydb/core/sys_view/resource_pool_classifiers/libcore-sys_view-resource_pool_classifiers.a ydb/core/sys_view/sessions/libcore-sys_view-sessions.a ydb/core/sys_view/storage/libcore-sys_view-storage.a ydb/core/sys_view/tablets/libcore-sys_view-tablets.a ydb/core/sys_view/libydb-core-sys_view.a ydb/core/tx/data_events/common/libtx-data_events-common.a ydb/core/tx/data_events/libcore-tx-data_events.a ydb/public/api/client/nc_private/accessservice/libclient-nc_private-accessservice.a ydb/library/ncloud/impl/liblibrary-ncloud-impl.a ydb/public/api/client/yc_private/operation/libclient-yc_private-operation.a ydb/public/api/client/yc_private/iam/libclient-yc_private-iam.a ydb/public/api/client/yc_private/servicecontrol/libclient-yc_private-servicecontrol.a ydb/public/api/client/yc_private/accessservice/libclient-yc_private-accessservice.a ydb/public/api/client/yc_private/resourcemanager/libclient-yc_private-resourcemanager.a ydb/core/grpc_caching/libydb-core-grpc_caching.a ydb/library/ycloud/impl/liblibrary-ycloud-impl.a ydb/core/security/libydb-core-security.a contrib/libs/openldap/libraries/liblber/libopenldap-libraries-liblber.a contrib/libs/sasl/libcontrib-libs-sasl.a contrib/libs/openldap/libcontrib-libs-openldap.a ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a ydb/public/lib/fq/libpublic-lib-fq.a ydb/services/ext_index/common/libservices-ext_index-common.a ydb/core/grpc_services/libydb-core-grpc_services.a ydb/core/security/certificate_check/libcore-security-certificate_check.a ydb/core/cms/console/validators/libcms-console-validators.a ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a ydb/public/lib/ydb_cli/commands/sdk_core_access/libydb_sdk_core_access.a ydb/public/lib/ydb_cli/commands/command_base/libydb_cli_command_base.a ydb/public/sdk/cpp/src/client/discovery/libsrc-client-discovery.a ydb/public/lib/ydb_cli/commands/ydb_discovery/libydb_cli_command_ydb_discovery.a ydb/core/driver_lib/cli_base/libcli_base.a ydb/core/config/init/libcore-config-init.a ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a ydb/library/formats/arrow/csv/converter/libarrow-csv-converter.a ydb/core/io_formats/arrow/scheme/libio_formats-arrow-scheme.a ydb/core/tx/balance_coverage/libcore-tx-balance_coverage.a ydb/core/tx/tx_proxy/libcore-tx-tx_proxy.a ydb/core/mind/bscontroller/libcore-mind-bscontroller.a ydb/core/kqp/counters/libcore-kqp-counters.a ydb/core/sys_view/processor/libcore-sys_view-processor.a ydb/core/test_tablet/libydb-core-test_tablet.a ydb/library/table_creator/libydb-library-table_creator.a ydb/services/metadata/request/libservices-metadata-request.a ydb/services/metadata/initializer/libservices-metadata-initializer.a ydb/services/metadata/manager/libservices-metadata-manager.a ydb/services/metadata/libydb-services-metadata.a ydb/core/tx/replication/controller/libtx-replication-controller.a ydb/core/mind/libydb-core-mind.a ydb/core/cms/console/libcore-cms-console.a 
ydb/core/mind/address_classification/libcore-mind-address_classification.a library/cpp/unified_agent_client/proto/libcpp-unified_agent_client-proto.a library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.a ydb/core/ymq/proto/libcore-ymq-proto.a ydb/library/http_proxy/error/liblibrary-http_proxy-error.a ydb/library/http_proxy/authorization/liblibrary-http_proxy-authorization.a ydb/core/ymq/base/libcore-ymq-base.a ydb/core/ymq/queues/common/libymq-queues-common.a ydb/core/ymq/queues/fifo/libymq-queues-fifo.a ydb/core/ymq/queues/std/libymq-queues-std.a ydb/core/ymq/actor/libcore-ymq-actor.a ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a ydb/core/kqp/opt/logical/libkqp-opt-logical.a ydb/library/naming_conventions/libydb-library-naming_conventions.a ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a ydb/core/kqp/opt/physical/libkqp-opt-physical.a ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a ydb/library/yql/providers/s3/expr_nodes/libproviders-s3-expr_nodes.a ydb/library/yql/providers/s3/statistics/libproviders-s3-statistics.a ydb/core/kqp/opt/libcore-kqp-opt.a yql/essentials/parser/proto_ast/gen/v0_proto_split/libproto_ast-gen-v0_proto_split.a yql/essentials/parser/proto_ast/gen/v0/libproto_ast-gen-v0.a yql/essentials/sql/v0/lexer/libsql-v0-lexer.a yql/essentials/sql/v0/libessentials-sql-v0.a ydb/library/yql/providers/dq/helper/libproviders-dq-helper.a yql/essentials/providers/pg/provider/libproviders-pg-provider.a ydb/core/kqp/host/libcore-kqp-host.a ydb/core/kqp/compile_service/libcore-kqp-compile_service.a ydb/library/yql/providers/generic/actors/libproviders-generic-actors.a ydb/library/yql/providers/s3/actors_factory/libproviders-s3-actors_factory.a ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a ydb/core/kqp/executer_actor/shards_resolver/libkqp-executer_actor-shards_resolver.a ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a ydb/core/kqp/rm_service/libcore-kqp-rm_service.a ydb/core/kqp/topics/libcore-kqp-topics.a ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a ydb/core/tx/columnshard/blobs_action/counters/libcolumnshard-blobs_action-counters.a ydb/core/tx/columnshard/engines/changes/counters/libengines-changes-counters.a ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a ydb/core/tx/tiering/libcore-tx-tiering.a ydb/core/tx/tiering/abstract/libtx-tiering-abstract.a ydb/core/tx/columnshard/blobs_action/common/libcolumnshard-blobs_action-common.a ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a ydb/core/tx/columnshard/engines/scheme/abstract/libengines-scheme-abstract.a ydb/core/tx/columnshard/engines/scheme/common/libengines-scheme-common.a ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a ydb/core/tx/columnshard/engines/scheme/tiering/libengines-scheme-tiering.a ydb/core/tx/columnshard/engines/scheme/column/libengines-scheme-column.a ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a 
ydb/core/tx/columnshard/splitter/abstract/libcolumnshard-splitter-abstract.a ydb/core/tx/columnshard/splitter/libtx-columnshard-splitter.a ydb/core/tx/columnshard/engines/portions/libcolumnshard-engines-portions.a ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a ydb/services/metadata/secret/libservices-metadata-secret.a ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a ydb/core/kqp/workload_service/common/libkqp-workload_service-common.a ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a ydb/core/statistics/database/libcore-statistics-database.a ydb/core/statistics/service/libcore-statistics-service.a ydb/core/kqp/gateway/libcore-kqp-gateway.a ydb/core/kqp/node_service/libcore-kqp-node_service.a ydb/core/kqp/proxy_service/proto/libkqp-proxy_service-proto.a ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a ydb/core/fq/libs/config/libfq-libs-config.a ydb/core/fq/libs/db_schema/libfq-libs-db_schema.a ydb/core/fq/libs/shared_resources/interface/liblibs-shared_resources-interface.a ydb/library/db_pool/libydb-library-db_pool.a ydb/public/sdk/cpp/src/client/extension_common/libsrc-client-extension_common.a ydb/public/sdk/cpp/src/client/extensions/solomon_stats/libclient-extensions-solomon_stats.a ydb/core/fq/libs/shared_resources/libfq-libs-shared_resources.a ydb/core/fq/libs/compute/common/liblibs-compute-common.a ydb/core/kqp/workload_service/libcore-kqp-workload_service.a ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a ydb/core/kqp/libydb-core-kqp.a ydb/services/lib/actors/libservices-lib-actors.a ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a ydb/services/persqueue_v1/libydb-services-persqueue_v1.a ydb/core/client/server/libcore-client-server.a ydb/core/quoter/libydb-core-quoter.a ydb/library/actors/testlib/common/libactors-testlib-common.a ydb/library/actors/testlib/liblibrary-actors-testlib.a ydb/core/testlib/actors/libcore-testlib-actors.a ydb/core/tx/columnshard/data_sharing/common/context/libdata_sharing-common-context.a ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a ydb/core/tx/columnshard/tablet/libtx-columnshard-tablet.a ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a ydb/core/tx/columnshard/blobs_action/local/libcolumnshard-blobs_action-local.a ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a 
ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a ydb/core/tx/columnshard/data_accessor/cache_policy/libcolumnshard-data_accessor-cache_policy.a ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.a ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/liblcbuckets-planner-level.a ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/liblcbuckets-planner-selector.a ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.a ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.a ydb/core/tx/columnshard/engines/storage/actualizer/index/libstorage-actualizer-index.a ydb/core/tx/columnshard/engines/storage/actualizer/common/libstorage-actualizer-common.a ydb/core/tx/columnshard/engines/storage/actualizer/abstract/libstorage-actualizer-abstract.a ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a ydb/core/tx/columnshard/engines/storage/actualizer/counters/libstorage-actualizer-counters.a ydb/core/tx/columnshard/engines/storage/chunks/libengines-storage-chunks.a ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.a ydb/core/tx/columnshard/engines/storage/indexes/portions/libstorage-indexes-portions.a ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.a ydb/core/tx/columnshard/engines/storage/indexes/skip_index/libstorage-indexes-skip_index.a ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.a ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.a ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a ydb/core/tx/columnshard/data_locks/manager/libcolumnshard-data_locks-manager.a ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a ydb/core/tx/columnshard/engines/predicate/libcolumnshard-engines-predicate.a ydb/core/tx/columnshard/resources/libtx-columnshard-resources.a ydb/core/tx/program/libcore-tx-program.a ydb/core/tx/conveyor/usage/libtx-conveyor-usage.a ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a ydb/core/tx/columnshard/engines/reader/common_reader/constructor/libreader-common_reader-constructor.a ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a ydb/core/util/evlog/libcore-util-evlog.a 
ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.a ydb/core/tx/limiter/grouped_memory/service/liblimiter-grouped_memory-service.a ydb/core/tx/limiter/grouped_memory/usage/liblimiter-grouped_memory-usage.a ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.a ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/libsimple_reader-iterator-collections.a ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/libsimple_reader-iterator-sync_points.a ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/libreader-simple_reader-duplicates.a ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/libreader-simple_reader-iterator.a ydb/core/tx/columnshard/engines/reader/sys_view/abstract/libreader-sys_view-abstract.a ydb/core/tx/columnshard/engines/reader/sys_view/constructor/libreader-sys_view-constructor.a ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a ydb/core/tx/columnshard/engines/changes/compaction/common/libchanges-compaction-common.a ydb/core/tx/columnshard/engines/changes/compaction/abstract/libchanges-compaction-abstract.a ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.a ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.a ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a ydb/core/tx/columnshard/tx_reader/libtx-columnshard-tx_reader.a ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a ydb/core/tx/columnshard/export/session/selector/abstract/libsession-selector-abstract.a ydb/core/tx/columnshard/export/session/storage/abstract/libsession-storage-abstract.a ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.a ydb/core/tx/columnshard/export/common/libcolumnshard-export-common.a ydb/core/tx/columnshard/export/events/libcolumnshard-export-events.a ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a ydb/core/tx/columnshard/normalizer/abstract/libcolumnshard-normalizer-abstract.a ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a 
ydb/core/tx/columnshard/subscriber/abstract/events/libsubscriber-abstract-events.a ydb/core/tx/columnshard/subscriber/abstract/subscriber/libsubscriber-abstract-subscriber.a ydb/core/tx/columnshard/subscriber/events/tables_erased/libsubscriber-events-tables_erased.a ydb/core/tx/columnshard/subscriber/events/tx_completed/libsubscriber-events-tx_completed.a ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a ydb/core/tx/conveyor_composite/usage/libtx-conveyor_composite-usage.a ydb/core/tx/conveyor_composite/service/libtx-conveyor_composite-service.a ydb/core/tx/general_cache/service/libtx-general_cache-service.a ydb/core/tx/general_cache/source/libtx-general_cache-source.a ydb/core/tx/general_cache/usage/libtx-general_cache-usage.a ydb/core/tx/priorities/usage/libtx-priorities-usage.a ydb/core/tx/priorities/service/libtx-priorities-service.a ydb/core/tx/time_cast/libcore-tx-time_cast.a ydb/core/tx/tracing/usage/libtx-tracing-usage.a ydb/core/tx/tracing/service/libtx-tracing-service.a ydb/core/tx/columnshard/libcore-tx-columnshard.a ydb/core/kesus/proxy/libcore-kesus-proxy.a ydb/services/kesus/libydb-services-kesus.a ydb/services/persqueue_cluster_discovery/cluster_ordering/libservices-persqueue_cluster_discovery-cluster_ordering.a ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a ydb/services/ydb/libydb-services-ydb.a ydb/core/testlib/basics/libcore-testlib-basics.a yql/essentials/sql/pg/libessentials-sql-pg.a yql/essentials/parser/pg_wrapper/libessentials-parser-pg_wrapper.a yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.a yt/yql/providers/yt/comp_nodes/llvm16/libyt-comp_nodes-llvm16.a yt/yt/library/skiff_ext/libyt-library-skiff_ext.a yt/yt/client/formats/libyt-client-formats.a yt/yt/client/arrow/fbs/libclient-arrow-fbs.a yt/yt/library/column_converters/libyt-library-column_converters.a contrib/libs/yaml/libcontrib-libs-yaml.a yt/yt/library/formats/libyt-library-formats.a yt/yt/client/arrow/libyt-client-arrow.a yt/yql/providers/yt/comp_nodes/dq/llvm16/libcomp_nodes-dq-llvm16.a -Wl,--end-group -rdynamic -ldl -lrt -Wl,--no-as-needed -Wl,--gdb-index -fuse-ld=lld --ld-path=/home/runner/.ya/tools/v4/7793618601/bin/ld.lld -Wl,--no-rosegment -Wl,--build-id=sha1 -lrt -ldl -lutil -nodefaultlibs -lpthread -lc -lm -Wl,--gc-sections -Wl,-no-pie failed with exit code 1 in /home/runner/.ya/build/build_root/43nv/001237 ld.lld: error: undefined symbol: NFq::MakeYqlAnalyticsHttpProxyId() >>> referenced by kqp_federated_query_helpers.cpp:116 (/-S/ydb/core/kqp/federated_query/kqp_federated_query_helpers.cpp:116) >>> kqp_federated_query_helpers.cpp.o:(NKikimr::NKqp::TKqpFederatedQuerySetupFactoryDefault::TKqpFederatedQuerySetupFactoryDefault(NActors::TActorSystemSetup*, NKikimr::TAppData const*, NKikimrConfig::TAppConfig const&)) in archive ydb/core/kqp/federated_query/libcore-kqp-federated_query.a ld.lld: error: undefined symbol: NFq::MakeDatabaseResolverActorId() >>> referenced by kqp_federated_query_helpers.cpp:122 (/-S/ydb/core/kqp/federated_query/kqp_federated_query_helpers.cpp:122) >>> kqp_federated_query_helpers.cpp.o:(NKikimr::NKqp::TKqpFederatedQuerySetupFactoryDefault::TKqpFederatedQuerySetupFactoryDefault(NActors::TActorSystemSetup*, NKikimr::TAppData const*, 
NKikimrConfig::TAppConfig const&)) in archive ydb/core/kqp/federated_query/libcore-kqp-federated_query.a ld.lld: error: undefined symbol: NFq::CreateDatabaseResolver(NActors::TActorId, std::__y1::shared_ptr) >>> referenced by kqp_federated_query_helpers.cpp:124 (/-S/ydb/core/kqp/federated_query/kqp_federated_query_helpers.cpp:124) >>> kqp_federated_query_helpers.cpp.o:(NKikimr::NKqp::TKqpFederatedQuerySetupFactoryDefault::TKqpFederatedQuerySetupFactoryDefault(NActors::TActorSystemSetup*, NKikimr::TAppData const*, NKikimrConfig::TAppConfig const&)) in archive ydb/core/kqp/federated_query/libcore-kqp-federated_query.a clang++: error: linker command failed with exit code 1 (use -v to see invocation) >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> KqpQuery::QueryClientTimeout >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> DataStreams::Test_AutoPartitioning_Describe [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> DataStreams::Test_Crreate_AutoPartitioning_Disabled >> KqpExplain::LimitOffset >> KqpStats::MultiTxStatsFullYql >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> KqpExplain::SortStage >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> DataStreams::Test_Crreate_AutoPartitioning_Disabled [GOOD] >> KqpStats::JoinNoStatsYql >> KqpLimits::OutOfSpaceBulkUpsertFail >> KqpQuery::OlapCreateAsSelect_Simple >> KqpExplain::LimitOffset [GOOD] >> KqpExplain::MultiUsedStage >> KqpStats::MultiTxStatsFullExpYql >> KqpStats::MultiTxStatsFullYql [GOOD] >> KqpStats::MultiTxStatsFullScan >> KqpLimits::ComputeActorMemoryAllocationFailure+useSink >> KqpExplain::SortStage [GOOD] >> KqpExplain::SelfJoin3xSameLabels >> DataStreams::TestStreamTimeRetention [GOOD] >> DataStreams::TestUnsupported >> KqpLimits::ComputeActorMemoryAllocationFailure+useSink [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailure-useSink >> KqpStats::MultiTxStatsFullExpYql [GOOD] >> KqpStats::MultiTxStatsFullExpScan >> KqpExplain::MultiUsedStage [GOOD] >> KqpExplain::MergeConnection >> KqpStats::JoinNoStatsYql [GOOD] >> KqpStats::JoinNoStatsScan >> KqpQuery::OlapCreateAsSelect_Simple [GOOD] >> KqpQuery::OltpCreateAsSelect_Simple >> KqpStats::MultiTxStatsFullScan [GOOD] >> KqpStats::OneShardLocalExec+UseSink >> KqpExplain::SelfJoin3xSameLabels [GOOD] >> KqpExplain::SqlIn >> KqpLimits::ComputeActorMemoryAllocationFailure-useSink [GOOD] >> KqpLimits::CancelAfterRwTx+useSink >> KqpQuery::OltpCreateAsSelect_Simple [GOOD] >> KqpQuery::OltpCreateAsSelect_Disable >> DataStreams::TestUnsupported [GOOD] >> KqpStats::MultiTxStatsFullExpScan [GOOD] >> KqpStats::JoinStatsBasicYql+StreamLookupJoin >> DataStreams::TestReservedResourcesMetering [GOOD] >> DataStreams::TestReservedStorageMetering >> KqpExplain::MergeConnection [GOOD] >> KqpExplain::IdxFullscan >> KqpStats::OneShardLocalExec+UseSink 
[GOOD] >> KqpStats::OneShardLocalExec-UseSink >> KqpQuery::OltpCreateAsSelect_Disable [GOOD] >> KqpQuery::OlapCreateAsSelect_Complex >> DataStreams::TestPutRecordsCornerCases [GOOD] >> DataStreams::TestPutRecords >> KqpStats::JoinNoStatsScan [GOOD] >> KqpStats::JoinStatsBasicScan >> KqpExplain::SqlIn [GOOD] >> KqpExplain::SsaProgramInJsonPlan >> KqpQuery::QueryClientTimeout [GOOD] >> KqpQuery::QueryClientTimeoutPrecompiled >> KqpStats::JoinStatsBasicYql+StreamLookupJoin [GOOD] >> KqpStats::JoinStatsBasicYql-StreamLookupJoin >> KqpExplain::IdxFullscan [GOOD] >> KqpExplain::MultiJoinCteLinks >> KqpStats::OneShardLocalExec-UseSink [GOOD] >> DataStreams::TestPutRecords [GOOD] >> KqpQuery::OlapCreateAsSelect_Complex [GOOD] >> KqpExplain::SsaProgramInJsonPlan [GOOD] >> KqpExplain::UpdateConditional+UseSink >> KqpStats::JoinStatsBasicYql-StreamLookupJoin [GOOD] >> KqpStats::JoinStatsBasicScan [GOOD] >> KqpStats::DeferredEffects-UseSink >> KqpExplain::MultiJoinCteLinks [GOOD] >> KqpScripting::StreamExecuteYqlScriptClientTimeoutBruteForce >> KqpExplain::UpdateConditional+UseSink [GOOD] >> KqpStats::DeferredEffects-UseSink [GOOD] >> YdbTableSplit::SplitByLoadWithReads [GOOD] >> KqpQuery::QueryClientTimeoutPrecompiled [GOOD] >> KqpQuery::QueryExplain >> KqpQuery::QueryExplain [GOOD] >> KqpQuery::QueryFromSqs >> KqpQuery::QueryFromSqs [GOOD] >> DataStreams::TestReservedStorageMetering [GOOD] >> DataStreams::TestReservedConsumersMetering >> KqpScripting::StreamExecuteYqlScriptClientTimeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo [GOOD] >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce [GOOD] >> TOlapReboots::AlterTtlSettings [GOOD] >> DataStreams::TestReservedConsumersMetering [GOOD] >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleShardTable [GOOD] >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnMultiShardTable >> IndexBuildTestReboots::DropIndexWithDataColumns [GOOD] >> KqpLimits::CancelAfterRwTx+useSink [GOOD] >> KqpLimits::CancelAfterRwTx-useSink >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnViewsAndTables [GOOD] >> IndexBuildTestReboots::DropIndex [GOOD] >> TErasureTypeTest::TestAllSpecies2of2 [GOOD] >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData [GOOD] >> KqpLimits::OutOfSpaceBulkUpsertFail [GOOD] >> KqpLimits::OutOfSpaceYQLUpsertFail+useSink >> DataStreams::TestGetRecords1MBMessagesOneByOneByTS [GOOD] >> DataStreams::TestGetRecordsStreamWithMultipleShards >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnSingleShardTableWithChangefeed [GOOD] >> TRtmrTestReboots::CreateRtmrVolumeWithReboots [GOOD] >> DataStreams::TestGetRecordsStreamWithMultipleShards [GOOD] >> DataStreams::TestGetRecordsWithBigSeqno >> CompositeConveyorTests::Test10xMultiDistribution [GOOD] >> DataStreams::TestGetRecordsWithBigSeqno [GOOD] >> YdbTableSplit::MergeByNoLoadAfterSplit [GOOD] >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleShardTable [GOOD] >> TExportToS3WithRebootsTests::ShouldSucceedOnViewsAndTables [GOOD] >> KqpLimits::CancelAfterRwTx-useSink [GOOD] >> DataStreams::TestGetRecordsWithCount [GOOD] >> DataStreams::TestInvalidRetentionCombinations >> DataStreams::TestInvalidRetentionCombinations [GOOD] >> BasicUsage::RetryDiscoveryWithCancel [GOOD] >> BasicUsage::RecreateObserver >> TExportToS3WithRebootsTests::ShouldSucceedOnSingleShardTableWithChangefeed [GOOD] >> YdbTableSplit::RenameTablesAndSplit [GOOD] >> 
TExportToS3WithRebootsTests::CancelShouldSucceedOnSingleShardTable [GOOD] >> TExportToS3WithRebootsTests::CancelShouldSucceedOnMultiShardTable >> TExportToS3WithRebootsTests::CancelShouldSucceedOnViewsAndTables [GOOD] >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnManyTables >> BasicUsage::RecreateObserver [GOOD] >> Secret::SimpleQueryService [GOOD] >> KqpLimits::OutOfSpaceYQLUpsertFail+useSink [GOOD] >> KqpLimits::OutOfSpaceYQLUpsertFail-useSink >> TExportToS3WithRebootsTests::ShouldSucceedOnMultiShardTable [GOOD] >> Secret::Simple [GOOD] >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnMultiShardTable [GOOD] >> TExportToS3WithRebootsTests::ShouldSucceedOnManyTables [GOOD] >> IndexBuildTestReboots::BaseCaseWithDataColumns [GOOD] >> IndexBuildTestReboots::BaseCase [GOOD] >> IndexBuildTestReboots::IndexPartitioning [GOOD] >> Secret::Validation [GOOD] >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [FAIL] >> IncrementalBackup::ComplexRestoreBackupCollection-WithIncremental >> TExportToS3WithRebootsTests::ForgetShouldSucceedOnManyTables [GOOD] >> IncrementalBackup::ComplexRestoreBackupCollection-WithIncremental [GOOD] >> KqpLimits::OutOfSpaceYQLUpsertFail-useSink [GOOD] >> KqpLimits::QSReplySize+useSink >> KqpLimits::QSReplySize+useSink [GOOD] >> TExportToS3WithRebootsTests::CancelShouldSucceedOnManyTables [GOOD] >> TExportToS3WithRebootsTests::CancelOnSingleShardTableWithChangefeed >> TBsVDiskManyPutGet::ManyPutRangeGetCompactionIndexOnly [GOOD] >> TBsVDiskManyPutGet::ManyPutRangeGet2ChannelsIndexOnly >> TBsVDiskManyPutGet::ManyPutRangeGet2ChannelsIndexOnly [GOOD] >> TBsVDiskManyPutGetCheckSize::ManyPutGetCheckSize >> TBsVDiskManyPutGetCheckSize::ManyPutGetCheckSize [GOOD] >> TExportToS3WithRebootsTests::CancelShouldSucceedOnMultiShardTable [GOOD] >> TExportToS3WithRebootsTests::CancelOnSingleShardTableWithChangefeed [GOOD] Number of suites skipped due to a failed build: 386, skipped by size: 36 ------ sole chunk ran 7 tests (total:238.28s - setup:0.04s test:238.08s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 16.6G (17438804K) used. 
This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid   rss    ref    pdirt
31075 46.6M  46.6M  6.3M   test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
31436 32.5M  20.8M  7.6M   └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
31906 48.0M  48.0M  23.0M  └─ test_tool run_ut @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tablet_flat/ut_large/test-results/unittest/testing_out_stuff/test_tool.args
33817 15.9G  16.5G  16.5G  └─ ydb-core-tablet_flat-ut_large --trace-path-append /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tablet_flat/ut_large/test-results/unittest/ytest.report.tra
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tablet_flat/ut_large/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tablet_flat/ut_large/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tablet_flat/ut_large/test-results/unittest/testing_out_stuff/stderr
ydb/library/yql/dq/actors/spilling/ut [size:medium] nchunks:10
------ [7/10] chunk ran 1 test (total:1.30s - setup:0.02s test:1.18s)
[crashed] DqSpillingFileTests::ThreadPoolQueueOverflow [default-linux-x86_64-relwithdebinfo] (0.00s)
Test crashed (return code: -11)
See logs for more info
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yql/dq/actors/spilling/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yql/dq/actors/spilling/ut/test-results/unittest/testing_out_stuff/DqSpillingFileTests.ThreadPoolQueueOverflow.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/yql/dq/actors/spilling/ut/test-results/unittest/testing_out_stuff/DqSpillingFileTests.ThreadPoolQueueOverflow.out
------ FAIL: 10 - GOOD, 1 - CRASHED ydb/library/yql/dq/actors/spilling/ut
------ sole chunk ran 3 tests (total:15.61s - setup:0.02s test:15.49s)
Info:
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/actors/cppcoro/corobenchmark/test-results/corobenchmark/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/actors/cppcoro/corobenchmark/test-results/corobenchmark/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/library/actors/cppcoro/corobenchmark/test-results/corobenchmark/testing_out_stuff/stderr
Total 396 suites:
    395 - GOOD
    1 - FAIL
Total 2986 tests:
    2985 - GOOD
    1 - CRASHED
SOME TESTS DIDN'T RUN DUE TO BUILD ERRORS
Cache efficiency ratio is 96.20% (53787 of 55912). 
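For reference, the REQUIREMENTS(ram:X) hint above refers to the test suite's ya.make file. A minimal, hypothetical sketch for the ydb/core/tablet_flat/ut_large suite that tripped the limit could look like the lines below; only the REQUIREMENTS(ram:X) macro itself comes from the hint, while the module header and the chosen value of 32 are illustrative assumptions, not taken from the repository:

    # ydb/core/tablet_flat/ut_large/ya.make (sketch only)
    UNITTEST_FOR(ydb/core/tablet_flat)   # assumed module header for the ut_large suite

    REQUIREMENTS(ram:32)                 # declare more than the ~16.6G peak reported by the runner

    END()

The declared value should cover the peak RSS shown in the process table above, with some headroom.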
Local: 0 (0.00%), dist: 4075 (7.29%), by dynamic uids: 0 (0.00%), avoided: 49712 (88.91%) Dist cache download: count=3645, size=22.81 GiB, speed=126.94 MiB/s Disk usage for tools/sdk at least 303.33 MiB Additional disk space consumed for build cache 246.15 GiB Critical path: [ 5421 ms] [CC] [KMO-RMnpjv8oAiXc2Ph4Ig default-linux-x86_64 relwithdebinfo]: $(SOURCE_ROOT)/ydb/library/actors/core/actorsystem.cpp [started: 0 (1751975650459), finished: 5421 (1751975655880)] [ 61 ms] [AR] [-jmjBBj5ibuimDKqCdUbTg default-linux-x86_64 relwithdebinfo]: $(BUILD_ROOT)/ydb/library/actors/core/liblibrary-actors-core.a [started: 6294 (1751975656753), finished: 6355 (1751975656814)] [ 1652 ms] [LD] [1m1bfulxuRlbQnWSBJGFuQ default-linux-x86_64 relwithdebinfo]: $(BUILD_ROOT)/ydb/core/blobstorage/ut_vdisk/ydb-core-blobstorage-ut_vdisk [started: 7989 (1751975658448), finished: 9641 (1751975660100)] [568184 ms] [TM] [rnd-6232492428653133501 default-linux-x86_64 relwithdebinfo]: ydb/core/blobstorage/ut_vdisk/unittest [started: 13859 (1751975664318), finished: 582043 (1751976232502)] Time from start: 735054.8491210938 ms, time elapsed by graph 575318 ms, time diff 159736.84912109375 ms. The longest 10 tasks: [568184 ms] [TM] [rnd-6232492428653133501 default-linux-x86_64 relwithdebinfo]: ydb/core/blobstorage/ut_vdisk/unittest [started: 1751975664318, finished: 1751976232502] [437352 ms] [TM] [rnd-7084974871248880779 default-linux-x86_64 relwithdebinfo]: ydb/core/blobstorage/ut_vdisk/unittest [started: 1751975662827, finished: 1751976100179] [430750 ms] [TM] [rnd-11197011938882963071 default-linux-x86_64 relwithdebinfo]: ydb/core/blobstorage/pdisk/ut/unittest [started: 1751975669727, finished: 1751976100477] [390496 ms] [TM] [rnd-13348630650640755055 default-linux-x86_64 relwithdebinfo]: ydb/core/erasure/ut/unittest [started: 1751975739710, finished: 1751976130206] [322059 ms] [TM] [rnd-13435726279946296294 default-linux-x86_64 relwithdebinfo]: ydb/tests/functional/benchmarks_init/py3test [started: 1751975742528, finished: 1751976064587] [301954 ms] [TM] [rnd-4786519033366470959 default-linux-x86_64 relwithdebinfo]: ydb/core/blobstorage/vdisk/repl/ut/unittest [started: 1751975662880, finished: 1751975964834] [238945 ms] [TM] [rnd-okcbdd729li680xn default-linux-x86_64 relwithdebinfo]: ydb/core/tablet_flat/ut_large/unittest [started: 1751975675140, finished: 1751975914085] [235463 ms] [TM] [rnd-18214594708512457634 default-linux-x86_64 relwithdebinfo]: ydb/tests/functional/benchmarks_init/py3test [started: 1751975733346, finished: 1751975968809] [222962 ms] [TM] [rnd-15926086674518218942 default-linux-x86_64 relwithdebinfo]: ydb/tests/functional/benchmarks_init/py3test [started: 1751975742810, finished: 1751975965772] [216570 ms] [TM] [rnd-13914749950062141877 default-linux-x86_64 relwithdebinfo]: ydb/core/erasure/ut/unittest [started: 1751975739154, finished: 1751975955724] Total time by type: [21567615 ms] [TM] [count: 1367, ave time 15777.33 msec] [14908794 ms] [prepare:get from dist cache] [count: 4075, ave time 3658.60 msec] [ 3657665 ms] [LD] [count: 213, ave time 17172.14 msec] [ 2155405 ms] [prepare:put to dist cache] [count: 197, ave time 10941.14 msec] [ 1557268 ms] [TS] [count: 440, ave time 3539.25 msec] [ 366944 ms] [prepare:bazel-store] [count: 3, ave time 122314.67 msec] [ 304907 ms] [prepare:tools] [count: 21, ave time 14519.38 msec] [ 255542 ms] [TA] [count: 72, ave time 3549.19 msec] [ 185107 ms] [CC] [count: 9, ave time 20567.44 msec] [ 91385 ms] [prepare:put into local cache, clean build dir] 
[count: 3802, ave time 24.04 msec] [ 61742 ms] [prepare:AC] [count: 4, ave time 15435.50 msec] [ 15459 ms] [PR] [count: 1, ave time 15459.00 msec] [ 3850 ms] [prepare:resources] [count: 2, ave time 1925.00 msec] [ 1579 ms] [AR] [count: 8, ave time 197.38 msec] [ 1388 ms] [AS] [count: 1, ave time 1388.00 msec] [ 1015 ms] [UN] [count: 3, ave time 338.33 msec] [ 807 ms] [PK] [count: 2, ave time 403.50 msec] [ 666 ms] [SB] [count: 2, ave time 333.00 msec] [ 561 ms] [BI] [count: 1, ave time 561.00 msec] [ 287 ms] [BN] [count: 1, ave time 287.00 msec] [ 279 ms] [ld] [count: 2, ave time 139.50 msec] [ 249 ms] [CF] [count: 2, ave time 124.50 msec] [ 120 ms] [CP] [count: 1, ave time 120.00 msec] [ 63 ms] [prepare:clean] [count: 3, ave time 21.00 msec] Total tasks times: Total failed tasks time - 0 ms (0.00%) Total tests tasks time - 23380425 ms (85.81%) Total run tasks time - 27245607 ms Configure time - 36.9 s Statistics overhead 1641 ms Warn: Test [project=ydb/core/kqp/gateway/ut, name=gtest] (uid=rnd-vzlrner32vlb3kyc): Infrastructure error - contact devtools@ for details. Suite build deps: [nWSR8mxp_2yQiztLrU2KXQ {'project_path': 'ydb/core/kqp/gateway/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/public/sdk/cpp/tests/integration/basic_example, name=gtest] (uid=rnd-01mn297t0c2k0ihj): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VDdpln3gW84-8k24B_DNgQ {'project_path': 'ydb/public/sdk/cpp/tests/integration/basic_example', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/public/sdk/cpp/tests/integration/bulk_upsert, name=gtest] (uid=rnd-wj7pbudg3iah64v3): Infrastructure error - contact devtools@ for details. Suite build deps: [0Qyf4WuPiqygD1i6FLD2tw {'project_path': 'ydb/public/sdk/cpp/tests/integration/bulk_upsert', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/public/sdk/cpp/tests/integration/server_restart, name=gtest] (uid=rnd-1iqfdwcrbtwvotgz): Infrastructure error - contact devtools@ for details. 
Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [dGumwYSFHM7acnKMsXToRw {'project_path': 'ydb/public/sdk/cpp/tests/integration/server_restart', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/public/tools/local_ydb, name=import_test] (uid=rnd-pw2tcs2dmzr4lgor): Infrastructure error - contact devtools@ for details. Suite build deps: [FzzzGBdMUOxR_flajqmjGA {'project_path': 'ydb/public/tools/local_ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [mhCyxXngbTw02ZAy20xMBQ {'project_path': 'ydb/public/tools/local_ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/s3, name=import_test] (uid=rnd-c9mlc55w4mivmf62): Infrastructure error - contact devtools@ for details. Suite build deps: [Zd4996ODGBwuhs5K1R1y1g {'project_path': 'ydb/tests/fq/s3', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/cms, name=import_test] (uid=rnd-6dow2gbaootlrxaw): Infrastructure error - contact devtools@ for details. Suite build deps: [2nTzn1ldkfh9muS7HZuObw {'project_path': 'ydb/tests/functional/cms', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/tpc/large, name=import_test] (uid=rnd-px2usx3n9uomb97q): Infrastructure error - contact devtools@ for details. Suite build deps: [K0GJ4wT0KNSLi7gJ4ZthgQ {'project_path': 'ydb/tests/functional/tpc/large', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/ydb_cli, name=import_test] (uid=rnd-mfd36twrveus12sg): Infrastructure error - contact devtools@ for details. Suite build deps: [OuZpGgEj7jnCgkIGQwDYqw {'project_path': 'ydb/tests/functional/ydb_cli', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/olap/load, name=import_test] (uid=rnd-5pmbm9hkl6kvjy6x): Infrastructure error - contact devtools@ for details. 
Suite build deps: [BgYFRD4Accd4XFxT9AvZ7w {'project_path': 'ydb/tests/olap/load', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/tools/nemesis/driver, name=import_test] (uid=rnd-frl98aqk8iwua5og): Infrastructure error - contact devtools@ for details. Suite build deps: [58reXgwqti57mG1-gdItsg {'project_path': 'ydb/tests/tools/nemesis/driver', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [8Yiuvrmr7LP75gAz6Lpszg {'project_path': 'ydb/tests/tools/nemesis/driver', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tools/cfg/bin, name=import_test] (uid=rnd-yar2raiosbrt19jp): Infrastructure error - contact devtools@ for details. Suite build deps: [ZAzd7gghKmq9jrtEIgmWWg {'project_path': 'ydb/tools/cfg/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [_WyPEQJEqUOqLiix9ZDbug {'project_path': 'ydb/tools/cfg/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/viewer/tests, name=py3test] (uid=rnd-j43j8w1k8ojasqta): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [M-OGqqNBx631GzOHbV6tXg {'project_path': 'ydb/core/viewer/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/example, name=py3test] (uid=rnd-974yhu34ur1kiifm): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bf1789YZ5Bn1q4Z9Fo0ouQ {'project_path': 'ydb/tests/example', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/common, name=py3test] (uid=rnd-llqjose9qgomrbex): Infrastructure error - contact devtools@ for details. 
Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [IJNiR4ipPHFvpDXrW9eTeg {'project_path': 'ydb/tests/fq/common', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/http_api, name=py3test] (uid=rnd-f63yzndb81fgp8q8): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [sTN3d-q0JV4IpRJzqwUmMg {'project_path': 'ydb/tests/fq/http_api', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/mem_alloc, name=py3test] (uid=rnd-9mg4iunvc3furr0x): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bMd40EeiJf4Ux4GI2Ycnjg {'project_path': 'ydb/tests/fq/mem_alloc', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [cyXkpl-lM0MVtLRxqtRUwQ {'project_path': 'ydb/tests/tools/pq_read', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/multi_plane, name=py3test] (uid=rnd-dadjzwtjqgjm8uco): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [cyXkpl-lM0MVtLRxqtRUwQ {'project_path': 'ydb/tests/tools/pq_read', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [xnT5c5swn4gL-P7LaGfIlQ {'project_path': 'ydb/tests/fq/multi_plane', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/fq/plans, name=py3test] (uid=rnd-j0jhz209q1s5k1mi): Infrastructure error - contact devtools@ for details. 
Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [wtoWF6_ldp-CrGCdip-XOg {'project_path': 'ydb/tests/fq/plans', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yxNWThklOGHiAPqLldgcvQ {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/fq/restarts, name=py3test] (uid=rnd-oh1wm0jrbya0uo3t): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [dEZKzNcj_nZbUS3zA-XTPw {'project_path': 'ydb/tests/fq/restarts', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yxNWThklOGHiAPqLldgcvQ {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/fq/s3, name=py3test] (uid=rnd-3jz2cyg2pitkcqf7): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [Zd4996ODGBwuhs5K1R1y1g {'project_path': 'ydb/tests/fq/s3', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [cyXkpl-lM0MVtLRxqtRUwQ {'project_path': 'ydb/tests/tools/pq_read', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yxNWThklOGHiAPqLldgcvQ {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/fq/solomon, name=py3test] (uid=rnd-7otzkbrzu6lf1hpk): Infrastructure error - contact devtools@ for details. 
Suite build deps: [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WiKGu27fC3ule5-gXFfxyg {'project_path': 'ydb/library/yql/tools/solomon_emulator_grpc', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [pxdo6wQmKt-W6HAf_9_d3Q {'project_path': 'ydb/tests/fq/solomon', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yds, name=py3test] (uid=rnd-l6tcfx3iejuyjb6n): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [EuZvAY-l-8HKyxeY34RlQw {'project_path': 'ydb/tests/fq/yds', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [cyXkpl-lM0MVtLRxqtRUwQ {'project_path': 'ydb/tests/tools/pq_read', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part0, name=py3test] (uid=rnd-yvqz9wi2zw9vqy12): Infrastructure error - contact devtools@ for details. Suite build deps: [9_in8NM0C8IIn_zdDNZOoA {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part0', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part1, name=py3test] (uid=rnd-42v9ckxphz6j1ykb): Infrastructure error - contact devtools@ for details. 
Suite build deps: [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [joCy2DrL0ucVtmj6M88UNQ {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part1', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part10, name=py3test] (uid=rnd-wromtaq6jwsk29ah): Infrastructure error - contact devtools@ for details. Suite build deps: [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [sN_8ak4iCADn6ueF6r4Hgg {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part10', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part11, name=py3test] (uid=rnd-v36i5j26xh58mx86): Infrastructure error - contact devtools@ for details. Suite build deps: [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [eL1Va3mnYnRN3mGczA_hLw {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part11', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part12, name=py3test] (uid=rnd-ldlshwk75tknrafe): Infrastructure error - contact devtools@ for details. 
Suite build deps: [IpyRdZy7Ko-UvnQxgW5AGA {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part12', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part13, name=py3test] (uid=rnd-gab773z4uc9pdfzb): Infrastructure error - contact devtools@ for details. Suite build deps: [Fa61RteTQSw8BmhPCfIl3w {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part13', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part14, name=py3test] (uid=rnd-wstkd247jnoa1ues): Infrastructure error - contact devtools@ for details. Suite build deps: [Bb8ZHo1QX9D46rynKT12Qw {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part14', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part15, name=py3test] (uid=rnd-5tbnkbl6jkpz3byd): Infrastructure error - contact devtools@ for details. 
Suite build deps: [TRGvXq1jcxmxrPzpM4jThQ {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part15', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part16, name=py3test] (uid=rnd-5334toeunsy2fo63): Infrastructure error - contact devtools@ for details. Suite build deps: [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ngPQ92q7hz1mK82oNG0UiQ {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part16', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part17, name=py3test] (uid=rnd-oyj7gd8ghuurplgq): Infrastructure error - contact devtools@ for details. Suite build deps: [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [jXk9FHXODGEBoumPECjpig {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part17', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part18, name=py3test] (uid=rnd-k0wid5w83as51fk1): Infrastructure error - contact devtools@ for details. 
Suite build deps: [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [nwsURidSUBMKZ6OQ6J4i5w {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part18', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part19, name=py3test] (uid=rnd-pehnused1zyxtrj0): Infrastructure error - contact devtools@ for details. Suite build deps: [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lmhRPPU_qdlgpR5JjV3K3w {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part19', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part2, name=py3test] (uid=rnd-yavnjp4916tny5k1): Infrastructure error - contact devtools@ for details. Suite build deps: [5avbQuRLI0__EjEImT5Izg {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part2', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part3, name=py3test] (uid=rnd-sp3cvix263swxaws): Infrastructure error - contact devtools@ for details. 
Suite build deps: [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [_USDmtQAF-8N0DSk744bYA {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part3', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part4, name=py3test] (uid=rnd-i8hnctcxs1h9zafp): Infrastructure error - contact devtools@ for details. Suite build deps: [4AHmjqchxvKooTbryhDnNg {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part4', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part5, name=py3test] (uid=rnd-js344863lw9ldavz): Infrastructure error - contact devtools@ for details. Suite build deps: [SotyYHEcqgTzw9cRRieBGg {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part5', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part6, name=py3test] (uid=rnd-eavew2jmwb1alkv0): Infrastructure error - contact devtools@ for details. 
Suite build deps: [Bkv2jahr8Gve-12SRSFBNA {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part6', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part7, name=py3test] (uid=rnd-vhgng9pl2ir8vgzf): Infrastructure error - contact devtools@ for details. Suite build deps: [49ZalB0z2EDhInKzir_Ggg {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part7', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part8, name=py3test] (uid=rnd-a1cyvlpd4h7sarg4): Infrastructure error - contact devtools@ for details. Suite build deps: [VKE60dFALzg8FtUgT2FXkg {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part8', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part9, name=py3test] (uid=rnd-6gfrd8ll57qv2avn): Infrastructure error - contact devtools@ for details. 
Suite build deps: [SQjTQmbE5eH0VVIojShahA {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part9', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/functional/api, name=py3test] (uid=rnd-r64rzx3utv9xidob): Infrastructure error - contact devtools@ for details. Suite build deps: [8rJdyosdsZEwlugb8ib-fw {'project_path': 'ydb/tests/functional/api', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/audit, name=py3test] (uid=rnd-b7bmbhvmmbnjnc0d): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [Htg8xB6RJfiHlSQ271G_0Q {'project_path': 'ydb/tests/functional/audit', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/autoconfig, name=py3test] (uid=rnd-3btv2hg8u74zio75): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [uyFvGYwW_fYW6s-LhmdgOg {'project_path': 'ydb/tests/functional/autoconfig', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/functional/blobstorage, name=py3test] (uid=rnd-zx91dn3a9iqkt0ds): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [abLLt3AkDqgYYTi0LTeEOw {'project_path': 'ydb/tests/functional/blobstorage', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/canonical, name=py3test] (uid=rnd-hi9iwqcr1rvg9d18): Infrastructure error - contact devtools@ for details. 
Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VB2dfHt9ye3kVrpHQkV-Aw {'project_path': 'ydb/tests/functional/canonical', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/clickbench, name=py3test] (uid=rnd-ods73oocbtcs47a1): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [LvTXiZIbhpuMh0gJimK0vw {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [z4RERXY_VwWBkDMOWgtgOw {'project_path': 'ydb/tests/functional/clickbench', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/functional/cms, name=py3test] (uid=rnd-itvf9nk1cmphrd9m): Infrastructure error - contact devtools@ for details. Suite build deps: [2nTzn1ldkfh9muS7HZuObw {'project_path': 'ydb/tests/functional/cms', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/config, name=py3test] (uid=rnd-fxfs2tkv0kamk68a): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [gzRqw1qqNSHhekgPbvLXSw {'project_path': 'ydb/tests/functional/config', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/encryption, name=py3test] (uid=rnd-lkp6wsr75ljd2zav): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [jvR007kkGzrimi7jmN-IDQ {'project_path': 'ydb/tests/functional/encryption', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/hive, name=py3test] (uid=rnd-btgldc62vs0fooqf): Infrastructure error - contact devtools@ for details. 
Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [T4Sz8jcKT9R9sNjx3L0pOg {'project_path': 'ydb/tests/functional/hive', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/limits, name=py3test] (uid=rnd-qq679dlsjz3r9fg4): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [HmcNyggn-Iwjvm8EgX00KA {'project_path': 'ydb/tests/functional/limits', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/minidumps, name=py3test] (uid=rnd-oacl4csxoaqm4ouy): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [gseJwhWBmRoxLcbkfOS_2A {'project_path': 'ydb/tests/functional/minidumps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/postgresql, name=py3test] (uid=rnd-3a3v7p22i2q6fm6h): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [Z39cdQH9E_vPJyIH2W5Szg {'project_path': 'ydb/tests/functional/postgresql', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [qMN0W7hm_mR5alJWAYWexA {'project_path': 'ydb/tests/functional/postgresql/psql', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/query_cache, name=py3test] (uid=rnd-j0ihwacsaq5nnknv): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [Ft3nCnA2tIVq4ZoTgpm_8g {'project_path': 'ydb/tests/functional/query_cache', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/rename, name=py3test] (uid=rnd-y6btrfmoon1ac2os): Infrastructure error - contact devtools@ for details. 
Suite build deps: [9hXHKoeAzz6mU1FLmf3gPA {'project_path': 'ydb/tests/functional/rename', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/restarts, name=py3test] (uid=rnd-mof6mbykkjebjlgp): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BxknTsFkUZ5Dr-QccbeB_g {'project_path': 'ydb/tests/functional/restarts', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/scheme_shard, name=py3test] (uid=rnd-hd6dfxzgt6nn3fvm): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [wUSeFpaSI_AwGR5d9cmHug {'project_path': 'ydb/tests/functional/scheme_shard', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/functional/scheme_tests, name=py3test] (uid=rnd-5cbwvsjey3pw4g2f): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [k7sjN5u_QrKtUDxlV16CyA {'project_path': 'ydb/tests/functional/scheme_tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/script_execution, name=py3test] (uid=rnd-2biy6treweg8es68): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [cgjTRdkrZ_4d7BRJ0eJnig {'project_path': 'ydb/tests/functional/script_execution', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/serializable, name=py3test] (uid=rnd-ysig7rvd2z84e4xq): Infrastructure error - contact devtools@ for details. 
Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [GkHyQNo5oKNB7jVcO3QwCg {'project_path': 'ydb/tests/functional/serializable', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/serverless, name=py3test] (uid=rnd-m88pk9cg58jwbhs0): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lZiVbNttqYAg8FRbEMdofA {'project_path': 'ydb/tests/functional/serverless', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sqs/cloud, name=py3test] (uid=rnd-qvtg82251aa127an): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [jvOkQA9X_2xMtbCxc2DEIA {'project_path': 'ydb/tests/functional/sqs/cloud', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sqs/common, name=py3test] (uid=rnd-pa47b67m69vvx36f): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [JhgeeDe0vMIfPxdCMs0pnQ {'project_path': 'ydb/tests/functional/sqs/common', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sqs/large, name=py3test] (uid=rnd-lxjmf8vvxwbo5k2m): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [LrJOODP0_b-tm3QwqNBhhQ {'project_path': 'ydb/tests/functional/sqs/large', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sqs/merge_split_common_table/fifo, name=py3test] (uid=rnd-5pybi7nwjm0q8b8t): Infrastructure error - contact devtools@ for details. 
Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [T_WmT_UFGgVQ4nKbQJqFcQ {'project_path': 'ydb/tests/functional/sqs/merge_split_common_table/fifo', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sqs/merge_split_common_table/std, name=py3test] (uid=rnd-63lyfanr89tosa69): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [Yncn6JscsW36xAZ9rB4U3Q {'project_path': 'ydb/tests/functional/sqs/merge_split_common_table/std', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sqs/messaging, name=py3test] (uid=rnd-u5dzgnb4u4k750ri): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [PTLwQVzmaYU1HKzWNIrkbg {'project_path': 'ydb/tests/functional/sqs/messaging', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sqs/multinode, name=py3test] (uid=rnd-fnsgeulgog22vksj): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [YKojzFSQQFch1ipu_je4gg {'project_path': 'ydb/tests/functional/sqs/multinode', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sqs/with_quotas, name=py3test] (uid=rnd-bkwsbxfxkopmunjh): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [kebFv3kZRpf3nS6N5YlGow {'project_path': 'ydb/tests/functional/sqs/with_quotas', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/tenants, name=py3test] (uid=rnd-74xhvsqbru8adzvi): Infrastructure error - contact devtools@ for details. 
Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RIadDrCh0I16yrGqcrdrMA {'project_path': 'ydb/tests/functional/tenants', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/tpc/medium, name=py3test] (uid=rnd-1h96d4be7q01m0ca): Infrastructure error - contact devtools@ for details. Suite build deps: [3NiieuagZAv2XidnTZGWtA {'project_path': 'ydb/tests/functional/tpc/medium', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [LvTXiZIbhpuMh0gJimK0vw {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/ttl, name=py3test] (uid=rnd-54ku8hvsv2tvj55b): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [mQznU_JUfbVnTr0x7pqQJw {'project_path': 'ydb/tests/functional/ttl', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/wardens, name=py3test] (uid=rnd-biaghif9z4d40rg8): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [i0M1jluoKuwMp7bFzax8HA {'project_path': 'ydb/tests/functional/wardens', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/ydb_cli, name=py3test] (uid=rnd-sh4ryb6jt8qwjagk): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [LvTXiZIbhpuMh0gJimK0vw {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [OuZpGgEj7jnCgkIGQwDYqw {'project_path': 'ydb/tests/functional/ydb_cli', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/library/ut, name=py3test] (uid=rnd-o1j9kl2txxn15xcm): Infrastructure error - contact devtools@ for details. 
Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [E79JQHP1kTkHXwC-essinw {'project_path': 'ydb/tests/library/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/olap, name=py3test] (uid=rnd-feh1txcw6svdjqd8): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [JIP_tcgnLjoMby5_zelnWg {'project_path': 'ydb/tests/olap', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [LvTXiZIbhpuMh0gJimK0vw {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/olap/scenario, name=py3test] (uid=rnd-p7awj5zhrsfr2w5u): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [wR3fDyzrd1PP7enFLtlyRQ {'project_path': 'ydb/tests/olap/scenario', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/olap/ttl_tiering, name=py3test] (uid=rnd-e2l53yavhix5w1aa): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [khozwlRAI-Ug_vRqgHg45g {'project_path': 'ydb/tests/olap/ttl_tiering', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yxNWThklOGHiAPqLldgcvQ {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/sql, name=py3test] (uid=rnd-fen4xt8yp018kqaz): Infrastructure error - contact devtools@ for details. Suite build deps: [16fDwbQZWZCGXLvPd7Lxsw {'project_path': 'ydb/tests/sql', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [LvTXiZIbhpuMh0gJimK0vw {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/kv/tests, name=py3test] (uid=rnd-bhv59sc25wijhj0v): Infrastructure error - contact devtools@ for details. 
Suite build deps: [BL5zslKUCHeCwdIttX-FsA {'project_path': 'ydb/tests/stress/kv/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [LvTXiZIbhpuMh0gJimK0vw {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/log/tests, name=py3test] (uid=rnd-uh7i9p54hgyfn56c): Infrastructure error - contact devtools@ for details. Suite build deps: [1ANIX2QRHr0yNhdjlGtmAg {'project_path': 'ydb/tests/stress/log/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [LvTXiZIbhpuMh0gJimK0vw {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/olap_workload/tests, name=py3test] (uid=rnd-08latk87jfjbp2xl): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [kB-aQv6tay8RJyK7h4nyTw {'project_path': 'ydb/tests/stress/olap_workload/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/oltp_workload/tests, name=py3test] (uid=rnd-5dgo7boay7yaf290): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fx4HsD3j_bqIBD15FcPfNA {'project_path': 'ydb/tests/stress/oltp_workload/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/simple_queue/tests, name=py3test] (uid=rnd-mwuwcgmmv5ifj1c4): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [EbOS91ryZ5LR5qz-_cPj-g {'project_path': 'ydb/tests/stress/simple_queue/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/transfer/tests, name=py3test] (uid=rnd-rimw1r1sazjgez2m): Infrastructure error - contact devtools@ for details. 
Suite build deps: [AIDIv6Yn994TwYyhMytsIg {'project_path': 'ydb/tests/stress/transfer', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [LvTXiZIbhpuMh0gJimK0vw {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [wVaQDKg1-xMsLuwqzQYU6Q {'project_path': 'ydb/tests/stress/transfer/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/tools/kqprun/tests, name=py3test] (uid=rnd-igm0y9naod1oc9vx): Infrastructure error - contact devtools@ for details. Suite build deps: [Hs6--qc0PmD1eVf6W887Ng {'project_path': 'ydb/tests/tools/kqprun/recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [Z9YUszgy9LOHFT6s1RgJYw {'project_path': 'ydb/tests/tools/kqprun/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/tools/nemesis/ut, name=py3test] (uid=rnd-astxv1c0p8hnu8d9): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [Gyb1KY2jXptkKN3kcGUOnw {'project_path': 'ydb/tests/tools/nemesis/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/tools/pq_read/test, name=py3test] (uid=rnd-y9cfdnuj0y7m1ltm): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [aKmCNyvhFGCLB_FqSLeqiA {'project_path': 'ydb/tests/tools/pq_read/test', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [cyXkpl-lM0MVtLRxqtRUwQ {'project_path': 'ydb/tests/tools/pq_read', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part0, name=pytest] (uid=rnd-2ddsln7pcwt425db): Infrastructure error - contact devtools@ for details. 
Suite build deps: [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [uyXg1XZGIQbQIwPEpXBtUA {'project_path': 'ydb/library/yql/tests/sql/dq_file/part0', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part1, name=pytest] (uid=rnd-c6jomkna9cs8rb43): Infrastructure error - contact devtools@ for details. Suite build deps: [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [tqQb7Bf1JhhXPL0Wqo6lVQ {'project_path': 'ydb/library/yql/tests/sql/dq_file/part1', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part10, name=pytest] (uid=rnd-e6cm33lxn4sahs3g): Infrastructure error - contact devtools@ for details. 
Suite build deps: [Wdy0Z0DpKWdwwgR5L3_Urw {'project_path': 'ydb/library/yql/tests/sql/dq_file/part10', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part11, name=pytest] (uid=rnd-pf10eeknn07x45rw): Infrastructure error - contact devtools@ for details. Suite build deps: [0IluDTu_dg_ti3frsDcfNA {'project_path': 'ydb/library/yql/tests/sql/dq_file/part11', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part12, name=pytest] (uid=rnd-9c8jehdbtvvv2uiy): Infrastructure error - contact devtools@ for details. 
Suite build deps: [4xI4-DTQloI_XoPv6KD3Ug {'project_path': 'ydb/library/yql/tests/sql/dq_file/part12', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part13, name=pytest] (uid=rnd-hpi1bvslkhptfj6x): Infrastructure error - contact devtools@ for details. Suite build deps: [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [wyFduD84wQfYkq9ZIOzomA {'project_path': 'ydb/library/yql/tests/sql/dq_file/part13', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part14, name=pytest] (uid=rnd-w5ydpoaluj1hu56z): Infrastructure error - contact devtools@ for details. 
Suite build deps: [Y6giBwe8oDL9vtYXvBa2_g {'project_path': 'ydb/library/yql/tests/sql/dq_file/part14', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part15, name=pytest] (uid=rnd-18dtq4jwbijt16bf): Infrastructure error - contact devtools@ for details. Suite build deps: [Cc-9VW4pYo5gsVW5B2GAOA {'project_path': 'ydb/library/yql/tests/sql/dq_file/part15', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part16, name=pytest] (uid=rnd-nq3mozj1ld7zw5xj): Infrastructure error - contact devtools@ for details. 
Suite build deps: [XbC8QBM1VdNwfOb-2VE0bA {'project_path': 'ydb/library/yql/tests/sql/dq_file/part16', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part17, name=pytest] (uid=rnd-4f31cqkufngs44zi): Infrastructure error - contact devtools@ for details. Suite build deps: [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [g2T-TnqFgDJQsyzEGWIJFw {'project_path': 'ydb/library/yql/tests/sql/dq_file/part17', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part18, name=pytest] (uid=rnd-vcuj5sn0vlxszshk): Infrastructure error - contact devtools@ for details. 
Suite build deps: [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oN9p2GA_dT7QtsgFX-c2ag {'project_path': 'ydb/library/yql/tests/sql/dq_file/part18', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part19, name=pytest] (uid=rnd-hyewupw0qtgf06aj): Infrastructure error - contact devtools@ for details. Suite build deps: [JeJR1fKrW_4qTyuEf0TfdQ {'project_path': 'ydb/library/yql/tests/sql/dq_file/part19', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part2, name=pytest] (uid=rnd-jich0m3qguissvoc): Infrastructure error - contact devtools@ for details. 
Suite build deps: [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [hr6pltADlp7T1vRJnuxD1g {'project_path': 'ydb/library/yql/tests/sql/dq_file/part2', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part3, name=pytest] (uid=rnd-zaoaldorivs9xzem): Infrastructure error - contact devtools@ for details. Suite build deps: [XdZ_f_cU7iXjziDkuKKLPw {'project_path': 'ydb/library/yql/tests/sql/dq_file/part3', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part4, name=pytest] (uid=rnd-3ar2kgy9cgy4hwcr): Infrastructure error - contact devtools@ for details. 
Suite build deps: [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yDLXVQijS38DOB9uQKrsnQ {'project_path': 'ydb/library/yql/tests/sql/dq_file/part4', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part5, name=pytest] (uid=rnd-zy1mwb1jwbwf1uo1): Infrastructure error - contact devtools@ for details. Suite build deps: [-GOj-NiSAuFZnzKcu9gq1g {'project_path': 'ydb/library/yql/tests/sql/dq_file/part5', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part6, name=pytest] (uid=rnd-oo0iu8l3efq61i3x): Infrastructure error - contact devtools@ for details. 
Suite build deps: [4f9PBi_ATGh2O2FlidorSw {'project_path': 'ydb/library/yql/tests/sql/dq_file/part6', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part7, name=pytest] (uid=rnd-i9u41z5nvld6yrdi): Infrastructure error - contact devtools@ for details. Suite build deps: [-121gu5qfhRwgHe7JmJVDA {'project_path': 'ydb/library/yql/tests/sql/dq_file/part7', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part8, name=pytest] (uid=rnd-yhrprxws2ufg6k14): Infrastructure error - contact devtools@ for details. 
Suite build deps: [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [pvsQcxyjHwzYajsslwneRQ {'project_path': 'ydb/library/yql/tests/sql/dq_file/part8', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part9, name=pytest] (uid=rnd-fhmo8lx91tulllm2): Infrastructure error - contact devtools@ for details. Suite build deps: [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [qtJZNH5J3fSCqpmr6WnWzQ {'project_path': 'ydb/library/yql/tests/sql/dq_file/part9', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part0, name=pytest] (uid=rnd-xocow37q5ukms5vw): Infrastructure error - contact devtools@ for details. 
Suite build deps: [4UNJA1nGD2xe1c1YjuAQeQ {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part0', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part1, name=pytest] (uid=rnd-ldzecxobvubws8ra): Infrastructure error - contact devtools@ for details. Suite build deps: [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [hN-rXNhRAZkmOkVtNfCL6g {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part1', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part10, name=pytest] (uid=rnd-v23a7k0zfqiijd6i): Infrastructure error - contact devtools@ for details. 
Suite build deps: [0CCikKSfg3ADkb_spahYLw {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part10', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part2, name=pytest] (uid=rnd-hz6qo5t1u2bnijnr): Infrastructure error - contact devtools@ for details. Suite build deps: [Vnvve7rjKvFhsVUzrwwycQ {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part2', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part3, name=pytest] (uid=rnd-s3rhd683w4efh6w0): Infrastructure error - contact devtools@ for details. 
Suite build deps: [Abd39QgMVw34pXuV5nqRZA {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part3', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part4, name=pytest] (uid=rnd-yq9zbjsezptg3qo7): Infrastructure error - contact devtools@ for details. Suite build deps: [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fwcUoRt9F6gqvjFGVFvlqQ {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part4', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part5, name=pytest] (uid=rnd-mvhwyav7o26waxg4): Infrastructure error - contact devtools@ for details. 
Suite build deps: [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [jLbN5VvPnP5y8BiWxj5bfg {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part5', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part6, name=pytest] (uid=rnd-g3xabh61d8h9f42v): Infrastructure error - contact devtools@ for details. Suite build deps: [WRJ4bPoaWfndxyHd8r1kyg {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part6', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part7, name=pytest] (uid=rnd-i2mz7y14fql179fm): Infrastructure error - contact devtools@ for details. 
Suite build deps: [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [mhUl4Xg5fEv3rHp50ppuWQ {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part7', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part8, name=pytest] (uid=rnd-h1vxc8j7laq3nuas): Infrastructure error - contact devtools@ for details. Suite build deps: [88VwWzu4Yu_JzhyA5YGhTw {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part8', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part9, name=pytest] (uid=rnd-dic294crq7134k5j): Infrastructure error - contact devtools@ for details. 
Suite build deps: [FT66HLSOEBqPpgHScnINPw {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part9', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/apps/ydb/ut, name=unittest] (uid=rnd-tvv9aov9vmzwgewi): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [LvTXiZIbhpuMh0gJimK0vw {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [mHilgpo9TeBC_7MhIJ7tzw {'project_path': 'ydb/apps/ydb/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/actorlib_impl/ut, name=unittest] (uid=rnd-e6hlkw0xi70o3hx1): Infrastructure error - contact devtools@ for details. Suite build deps: [1vgsv2-KHR-wX5oK8dGQ1w {'project_path': 'ydb/core/actorlib_impl/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/backup/impl/ut_table_writer, name=unittest] (uid=rnd-aa6wl3ljavu561un): Infrastructure error - contact devtools@ for details. Suite build deps: [HhiYFniY6UyPQHwrOLiAXA {'project_path': 'ydb/core/backup/impl/ut_table_writer', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/base/ut, name=unittest] (uid=rnd-baqxx7rexv47cmcw): Infrastructure error - contact devtools@ for details. 
Suite build deps: [5-oA5fo6hDt5AaY1_hvhVA {'project_path': 'ydb/core/base/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/backpressure/ut, name=unittest] (uid=rnd-4ues06i65kkgezsa): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [xNXz-pD6-Mtl_sTZfmFnJQ {'project_path': 'ydb/core/blobstorage/backpressure/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/blobstorage/dsproxy/ut, name=unittest] (uid=rnd-8lf9aj6q6n0c8936): Infrastructure error - contact devtools@ for details. Suite build deps: [noo-s1pCctlAldfXPRgphQ {'project_path': 'ydb/core/blobstorage/dsproxy/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/dsproxy/ut_fat, name=unittest] (uid=rnd-hz2tvwvrlhqznzor): Infrastructure error - contact devtools@ for details. Suite build deps: [l6nT9IbwsP9nheUT615PRQ {'project_path': 'ydb/core/blobstorage/dsproxy/ut_fat', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/dsproxy/ut_ftol, name=unittest] (uid=rnd-p0zam6c8271fi3cx): Infrastructure error - contact devtools@ for details. Suite build deps: [guRvh83k--S0q6PxiNkJKg {'project_path': 'ydb/core/blobstorage/dsproxy/ut_ftol', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/dsproxy/ut_strategy, name=unittest] (uid=rnd-kdvu3tej10leg60u): Infrastructure error - contact devtools@ for details. Suite build deps: [cEDJxM8LlXa4CUQY3ypDFw {'project_path': 'ydb/core/blobstorage/dsproxy/ut_strategy', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/groupinfo/ut, name=unittest] (uid=rnd-gqk2k5zyflkafjwb): Infrastructure error - contact devtools@ for details. Suite build deps: [6VzOt66IvPANbSE1DeuqyA {'project_path': 'ydb/core/blobstorage/groupinfo/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/nodewarden/ut, name=unittest] (uid=rnd-523yy2cu2ix7fv5r): Infrastructure error - contact devtools@ for details. 
Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [v1HotOQCRtPtLTlnVo_liQ {'project_path': 'ydb/core/blobstorage/nodewarden/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/blobstorage/nodewarden/ut_sequence, name=unittest] (uid=rnd-fcljx9mcqldxv247): Infrastructure error - contact devtools@ for details. Suite build deps: [tOeqJYh6ptgQ4STat_XJmw {'project_path': 'ydb/core/blobstorage/nodewarden/ut_sequence', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/pdisk/ut, name=unittest] (uid=rnd-340kvnvkkf7iraa5): Infrastructure error - contact devtools@ for details. Suite build deps: [57UB9kFfll7xo9iwxlh2VQ {'project_path': 'ydb/core/blobstorage/pdisk/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage, name=unittest] (uid=rnd-wni2bcmo12tx7c5k): Infrastructure error - contact devtools@ for details. Suite build deps: [h1_yhLOiJ41wJWGwvZXR5A {'project_path': 'ydb/core/blobstorage/ut_blobstorage', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_balancing, name=unittest] (uid=rnd-9qpmkqas3jykhzkk): Infrastructure error - contact devtools@ for details. Suite build deps: [mYXZxrxEr0XhLZmSBZHBYA {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_balancing', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_blob_depot, name=unittest] (uid=rnd-afhqng1eiefocav1): Infrastructure error - contact devtools@ for details. Suite build deps: [Ts3C4fHSFblM2KfayG8Vmw {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_blob_depot', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_donor, name=unittest] (uid=rnd-kucvasx8vnd0mfl1): Infrastructure error - contact devtools@ for details. Suite build deps: [cWazmh7lvaMV1ZFEpKL74Q {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_donor', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_huge, name=unittest] (uid=rnd-1iwb7syddcpa4dbb): Infrastructure error - contact devtools@ for details. 
Suite build deps: [qcAm6mZPduUZjC1ROmQXFA {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_huge', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk, name=unittest] (uid=rnd-cml4r0y8qgxe0kmz): Infrastructure error - contact devtools@ for details. Suite build deps: [SmeElTz9f9tG0Dumn6uxew {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk, name=unittest] (uid=rnd-hbfxkq9ss10bo5vp): Infrastructure error - contact devtools@ for details. Suite build deps: [DDS5n_Iw-36JWY-2BwfUhw {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk, name=unittest] (uid=rnd-mrguz3px36xkjm1z): Infrastructure error - contact devtools@ for details. Suite build deps: [CSbBQ11xyJA9O2xZ7p63bQ {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart, name=unittest] (uid=rnd-bjaimq2etx7ttdde): Infrastructure error - contact devtools@ for details. Suite build deps: [iMX5s0KaKMcxLgMsCb-hMA {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_testshard, name=unittest] (uid=rnd-neeva3jz0imnpbhm): Infrastructure error - contact devtools@ for details. Suite build deps: [Fz2eJXhlysL9FhRVSM1mOg {'project_path': 'ydb/core/blobstorage/ut_testshard', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_vdisk, name=unittest] (uid=rnd-cymmaol0y15rimko): Infrastructure error - contact devtools@ for details. Suite build deps: [1m1bfulxuRlbQnWSBJGFuQ {'project_path': 'ydb/core/blobstorage/ut_vdisk', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_vdisk2, name=unittest] (uid=rnd-5ow4b6h6ou0l4khc): Infrastructure error - contact devtools@ for details. 
Suite build deps: [eZoihKLz4i8Oqw2AV0UC8Q {'project_path': 'ydb/core/blobstorage/ut_vdisk2', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/vdisk/anubis_osiris/ut, name=unittest] (uid=rnd-fb14r5kn0ueoq3u2): Infrastructure error - contact devtools@ for details. Suite build deps: [-Z42BwdsSSM8aIJo8PMguQ {'project_path': 'ydb/core/blobstorage/vdisk/anubis_osiris/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/vdisk/hulldb/base/ut, name=unittest] (uid=rnd-fhfj5b01txh62p6e): Infrastructure error - contact devtools@ for details. Suite build deps: [XjhLRQx0fjAxON1IjTAg3Q {'project_path': 'ydb/core/blobstorage/vdisk/hulldb/base/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/vdisk/hulldb/cache_block/ut, name=unittest] (uid=rnd-gb9xwb2xfgtrr148): Infrastructure error - contact devtools@ for details. Suite build deps: [JPAzpeYGYKgzDRr9TB6q8Q {'project_path': 'ydb/core/blobstorage/vdisk/hulldb/cache_block/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/vdisk/hullop/ut, name=unittest] (uid=rnd-l7cmorfzxiv13d6p): Infrastructure error - contact devtools@ for details. Suite build deps: [ncrHJJ_bV2uozY2KCjxcVw {'project_path': 'ydb/core/blobstorage/vdisk/hullop/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/vdisk/ingress/ut, name=unittest] (uid=rnd-tobgtu22wm22ojtl): Infrastructure error - contact devtools@ for details. Suite build deps: [0qllbksu--A0j3in-X_r5Q {'project_path': 'ydb/core/blobstorage/vdisk/ingress/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/vdisk/skeleton/ut, name=unittest] (uid=rnd-wgmrrbb21xhr5pxe): Infrastructure error - contact devtools@ for details. Suite build deps: [VVNUu-iMzbZTL5tbr0lJdg {'project_path': 'ydb/core/blobstorage/vdisk/skeleton/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/vdisk/syncer/ut, name=unittest] (uid=rnd-phxm1aw1axxmb3sa): Infrastructure error - contact devtools@ for details. 
Suite build deps: [t6VHY1z6ma_-uzMUCN9fNg {'project_path': 'ydb/core/blobstorage/vdisk/syncer/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/vdisk/synclog/ut, name=unittest] (uid=rnd-kpkjds918rnaszvb): Infrastructure error - contact devtools@ for details. Suite build deps: [tDYLZJdsvjjr6QiGsDT68A {'project_path': 'ydb/core/blobstorage/vdisk/synclog/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/client/minikql_compile/ut, name=unittest] (uid=rnd-c3mjv6qb9xr5p0yo): Infrastructure error - contact devtools@ for details. Suite build deps: [e-0akBM95roIUXFB46dGdA {'project_path': 'ydb/core/client/minikql_compile/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/client/server/ut, name=unittest] (uid=rnd-mb86h24z89pygnwj): Infrastructure error - contact devtools@ for details. Suite build deps: [_OWjI7UHPzioiw2p7CNEGQ {'project_path': 'ydb/core/client/server/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/client/ut, name=unittest] (uid=rnd-c9823qr9l2pmhmch): Infrastructure error - contact devtools@ for details. Suite build deps: [9N4hc78AAMN-bOwmzlB_BQ {'project_path': 'ydb/core/client/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [t0xDYv0EVYq8grba50G1eA {'project_path': 'ydb/tests/supp', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/cms/console/ut, name=unittest] (uid=rnd-rxe2u014n3odnj9t): Infrastructure error - contact devtools@ for details. Suite build deps: [g7jNN0vMWgMW4AsNPxa1Yg {'project_path': 'ydb/core/cms/console/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/cms/console/validators/ut, name=unittest] (uid=rnd-na74rtgiognhnbeh): Infrastructure error - contact devtools@ for details. Suite build deps: [1PqmQtXaCD6ZezkfcQ7g-A {'project_path': 'ydb/core/cms/console/validators/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/cms/ut, name=unittest] (uid=rnd-wdg4nyllqca13rgu): Infrastructure error - contact devtools@ for details. 
Suite build deps: [ZfALacHLE_As6rcubcGKJg {'project_path': 'ydb/core/cms/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/cms/ut_sentinel, name=unittest] (uid=rnd-g5bqpg98gm6c509k): Infrastructure error - contact devtools@ for details. Suite build deps: [MuX9aYdNdEf8n0vHSkydDQ {'project_path': 'ydb/core/cms/ut_sentinel', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/cms/ut_sentinel_unstable, name=unittest] (uid=rnd-9k2uoa81xkzhk73e): Infrastructure error - contact devtools@ for details. Suite build deps: [-cNMBXSS_JYVHMgjXO6LCg {'project_path': 'ydb/core/cms/ut_sentinel_unstable', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/control/ut, name=unittest] (uid=rnd-azuktj5du5406h7b): Infrastructure error - contact devtools@ for details. Suite build deps: [eJQ49yi7FjWFkwtkV9VeIQ {'project_path': 'ydb/core/control/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/erasure/ut, name=unittest] (uid=rnd-jz7lng855v7kxg45): Infrastructure error - contact devtools@ for details. Suite build deps: [OK8cLUSOEmStI6yaLy5ySA {'project_path': 'ydb/core/erasure/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/external_sources/s3/ut, name=unittest] (uid=rnd-nixycrq78gtwcfyu): Infrastructure error - contact devtools@ for details. Suite build deps: [5Q42tZ7PoNSpXtQn8VijNQ {'project_path': 'library/recipes/docker_compose/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [DQ5nj5QlLB92bhWG641i4A {'project_path': 'library/recipes/docker_compose', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [mLGyzs24isAiHycSns2hqA {'project_path': 'ydb/core/external_sources/s3/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/fq/libs/checkpoint_storage/ut, name=unittest] (uid=rnd-mn1o8gkf246eaolv): Infrastructure error - contact devtools@ for details. 
Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [W8refynzIGb4hpbozf-I3g {'project_path': 'ydb/core/fq/libs/checkpoint_storage/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/fq/libs/checkpointing/ut, name=unittest] (uid=rnd-52wn4w6yv5h1s5hx): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yRpKIjbewAHm94DLJ5nHwA {'project_path': 'ydb/core/fq/libs/checkpointing/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/fq/libs/common/ut, name=unittest] (uid=rnd-wo4sbcjmt0bml3l6): Infrastructure error - contact devtools@ for details. Suite build deps: [CwkxALwq0AEaMmkF4UGh8w {'project_path': 'ydb/core/fq/libs/common/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/fq/libs/control_plane_proxy/ut, name=unittest] (uid=rnd-jom1xgogrc9xjwr6): Infrastructure error - contact devtools@ for details. Suite build deps: [FucJqSmjJUY8C7_8zw8DPw {'project_path': 'ydb/core/fq/libs/control_plane_proxy/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/fq/libs/result_formatter/ut, name=unittest] (uid=rnd-5w6vve7m0w1uacwc): Infrastructure error - contact devtools@ for details. Suite build deps: [IArvb-ATN2eL9TBLLpBsbg {'project_path': 'ydb/core/fq/libs/result_formatter/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/fq/libs/row_dispatcher/format_handler/ut, name=unittest] (uid=rnd-qqym9h3c52no244f): Infrastructure error - contact devtools@ for details. Suite build deps: [jMkSgkMDUcv7QZWpnhZtLA {'project_path': 'ydb/core/fq/libs/row_dispatcher/format_handler/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/fq/libs/row_dispatcher/ut, name=unittest] (uid=rnd-dzere0b78wj4fw1s): Infrastructure error - contact devtools@ for details. 
Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [eyj9hqF4qrAbKg5UVJzKKw {'project_path': 'ydb/core/fq/libs/row_dispatcher/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/fq/libs/ydb/ut, name=unittest] (uid=rnd-m6vn1tmv2ej7ax0l): Infrastructure error - contact devtools@ for details. Suite build deps: [-Zl0PENHkEo8-uaFaQ6cgw {'project_path': 'ydb/core/fq/libs/ydb/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/graph/shard/ut, name=unittest] (uid=rnd-dwwgumxj2x8vfqyl): Infrastructure error - contact devtools@ for details. Suite build deps: [rUq2HS9VxmOn-HnxHtKAFw {'project_path': 'ydb/core/graph/shard/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/graph/ut, name=unittest] (uid=rnd-m5omx5lzxr7d0yku): Infrastructure error - contact devtools@ for details. Suite build deps: [2rymvZn8nhFTW4Sl4eGJuw {'project_path': 'ydb/core/graph/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/grpc_services/tablet/ut, name=unittest] (uid=rnd-yk5vzq837kre28e9): Infrastructure error - contact devtools@ for details. Suite build deps: [NLXiAOwvFjQLNhqXilY71A {'project_path': 'ydb/core/grpc_services/tablet/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/grpc_services/ut, name=unittest] (uid=rnd-vxe3g53e64sc5m8o): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [zFGyEvkiYtyHeenCtD9nPw {'project_path': 'ydb/core/grpc_services/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/grpc_streaming/ut, name=unittest] (uid=rnd-h6yphcgy8nzhtzxi): Infrastructure error - contact devtools@ for details. 
Suite build deps: [OzdoudPJ5WI8khB0kAavQQ {'project_path': 'ydb/core/grpc_streaming/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/health_check/ut, name=unittest] (uid=rnd-kh6wr3g1pc3mxyk8): Infrastructure error - contact devtools@ for details. Suite build deps: [FhhCZo0SwAU5qaV-H3FQcw {'project_path': 'ydb/core/health_check/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/http_proxy/ut, name=unittest] (uid=rnd-h7qvv99uh0nq9osd): Infrastructure error - contact devtools@ for details. Suite build deps: [08Qmtdy3roDQRRd_4XtP8w {'project_path': 'ydb/core/http_proxy/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/http_proxy/ut/inside_ydb_ut, name=unittest] (uid=rnd-o6e7br7a0qux0fnp): Infrastructure error - contact devtools@ for details. Suite build deps: [q3RBV_Z1N7rM2Vh8kTF57g {'project_path': 'ydb/core/http_proxy/ut/inside_ydb_ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kafka_proxy/ut, name=unittest] (uid=rnd-g2w3stctymfxc5zm): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [txptHr_iTnb1KjOVoG1-4A {'project_path': 'ydb/core/kafka_proxy/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/kesus/tablet/ut, name=unittest] (uid=rnd-egegkq3bp8m8uhsa): Infrastructure error - contact devtools@ for details. Suite build deps: [L_IrLYUP-Uzh7xavgUAwAg {'project_path': 'ydb/core/kesus/tablet/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/keyvalue/ut, name=unittest] (uid=rnd-2s309gcdhkvkhewx): Infrastructure error - contact devtools@ for details. Suite build deps: [q95q2QvBngTW3_thXIuq2w {'project_path': 'ydb/core/keyvalue/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/keyvalue/ut_trace, name=unittest] (uid=rnd-2vayida1eudeupzk): Infrastructure error - contact devtools@ for details. 
Suite build deps: [GLhZcjGUzLpSRycyVLpthQ {'project_path': 'ydb/core/keyvalue/ut_trace', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/proxy_service/ut, name=unittest] (uid=rnd-lq5bpuqcm8su1c8d): Infrastructure error - contact devtools@ for details. Suite build deps: [2-MS6xxFJwrOXHJpcTz-qA {'project_path': 'ydb/core/kqp/proxy_service/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/tests/kikimr_tpch, name=unittest] (uid=rnd-nd7agsijezlxjigp): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [KGoNEbTTIHSSfp8lA2hRmA {'project_path': 'yql/essentials/udfs/common/string', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [LY7YyY0X2K-Pu3gU2j8Vhw {'project_path': 'ydb/library/yql/udfs/common/datetime', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [c8D_-SD2Srz7qbMAzjrsOA {'project_path': 'yql/essentials/udfs/common/datetime2', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [kyKtgnZz-pAr_NCz6YPrMg {'project_path': 'ydb/core/kqp/tests/kikimr_tpch', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [ul_t95we-PSim33xXqRZAg {'project_path': 'yql/essentials/udfs/common/pire', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [y_l-_UH8k8vcNqfTjOTpTw {'project_path': 'yql/essentials/udfs/common/re2', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/core/kqp/ut/batch_operations, name=unittest] (uid=rnd-864iju0w52b5q9ym): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [u8iz401A4_WTq8wlnw4GBg {'project_path': 'ydb/core/kqp/ut/batch_operations', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/kqp/ut/cost, name=unittest] (uid=rnd-6y3ae4cee9ejlm40): Infrastructure error - contact devtools@ for details. 
Suite build deps: [I9-qMhKVmMjldfaO63O9Hg {'project_path': 'ydb/core/kqp/ut/cost', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/data, name=unittest] (uid=rnd-mkqlue8l1247zupo): Infrastructure error - contact devtools@ for details. Suite build deps: [TKq6-vG4oLBi8XfODLIaKw {'project_path': 'ydb/core/kqp/ut/data', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/data_integrity, name=unittest] (uid=rnd-q1apcycgtu8fo8vi): Infrastructure error - contact devtools@ for details. Suite build deps: [cMJMnG8gdcf0WGS2VkAAmA {'project_path': 'ydb/core/kqp/ut/data_integrity', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/federated_query/generic_ut, name=unittest] (uid=rnd-69n98bn293pwu56z): Infrastructure error - contact devtools@ for details. Suite build deps: [Ergvqe2N5MYd6t4lfM3coQ {'project_path': 'ydb/core/kqp/ut/federated_query/generic_ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/federated_query/s3, name=unittest] (uid=rnd-x6f70ux3n8etajfl): Infrastructure error - contact devtools@ for details. Suite build deps: [-SsLDoMlrPe028IXd7n06w {'project_path': 'ydb/tests/tools/s3_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WBaOymL41lGZ_vqq_p-kaA {'project_path': 'ydb/core/kqp/ut/federated_query/s3', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yxNWThklOGHiAPqLldgcvQ {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/kqp/ut/idx_test, name=unittest] (uid=rnd-mzv1g7bv7ctzwavx): Infrastructure error - contact devtools@ for details. Suite build deps: [ej78KEpFKy2JALBkpM9-sQ {'project_path': 'ydb/core/kqp/ut/idx_test', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/indexes, name=unittest] (uid=rnd-jdvoiu9p2iuljwgo): Infrastructure error - contact devtools@ for details. 
Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [u9Sab47po9EhvFf4R7niBA {'project_path': 'ydb/core/kqp/ut/indexes', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/kqp/ut/join, name=unittest] (uid=rnd-bqsn3xfshqgvt63v): Infrastructure error - contact devtools@ for details. Suite build deps: [Lu60nPaZtluxn2Gl7XnugA {'project_path': 'ydb/core/kqp/ut/join', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/olap, name=unittest] (uid=rnd-z7vqr7dfn81tso99): Infrastructure error - contact devtools@ for details. Suite build deps: [G75Mol3eu1QtVyoNo98H-A {'project_path': 'ydb/core/kqp/ut/olap', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/opt, name=unittest] (uid=rnd-gwxwk6uwka1h22dr): Infrastructure error - contact devtools@ for details. Suite build deps: [c6lRt6jQ_xMQ9wPl28RPMw {'project_path': 'ydb/core/kqp/ut/opt', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/perf, name=unittest] (uid=rnd-vur7a2qzedc196s4): Infrastructure error - contact devtools@ for details. Suite build deps: [Wr2nGSPzxvspueD8sHSdkw {'project_path': 'ydb/core/kqp/ut/perf', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/pg, name=unittest] (uid=rnd-u8k0fj3tvn16mk0m): Infrastructure error - contact devtools@ for details. Suite build deps: [pYxvZma5GJODj5ZUNpjkYg {'project_path': 'ydb/core/kqp/ut/pg', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/query, name=unittest] (uid=rnd-ifvbgeapf4948rgu): Infrastructure error - contact devtools@ for details. Suite build deps: [TesBczaM5360T80Jg2899w {'project_path': 'ydb/core/kqp/ut/query', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/scan, name=unittest] (uid=rnd-13pg3ilao6a9c3k3): Infrastructure error - contact devtools@ for details. 
Suite build deps: [MbhEKB4VGXPsy5BVZUxyMw {'project_path': 'ydb/core/kqp/ut/scan', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/scheme, name=unittest] (uid=rnd-g5v1uk707f5is8j9): Infrastructure error - contact devtools@ for details. Suite build deps: [1yjRMwM31Gbu3iV5ChgQjg {'project_path': 'ydb/core/kqp/ut/scheme', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/service, name=unittest] (uid=rnd-62dy8phlww5x26vh): Infrastructure error - contact devtools@ for details. Suite build deps: [mgTu4PFjkrO96uDH0jMZeA {'project_path': 'ydb/core/kqp/ut/service', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/sysview, name=unittest] (uid=rnd-8t7cdcgenln8p50z): Infrastructure error - contact devtools@ for details. Suite build deps: [pjpgpmo9q5T1dPKn0ztzig {'project_path': 'ydb/core/kqp/ut/sysview', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/tx, name=unittest] (uid=rnd-e836cv8ullor4a0l): Infrastructure error - contact devtools@ for details. Suite build deps: [-OsCQsf2eQFop1Zi7bA29A {'project_path': 'ydb/core/kqp/ut/tx', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/view, name=unittest] (uid=rnd-tqbmwv8hvr1dl63u): Infrastructure error - contact devtools@ for details. Suite build deps: [sOm74sP75q8XjSGGaAF-yA {'project_path': 'ydb/core/kqp/ut/view', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/yql, name=unittest] (uid=rnd-w5re9rnzzs886tas): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [ujp-4rKk5woqn6GJcJJzJA {'project_path': 'ydb/core/kqp/ut/yql', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/kqp/workload_service/ut, name=unittest] (uid=rnd-2sainyu61gf11ols): Infrastructure error - contact devtools@ for details. 
Suite build deps: [QsPQx9IALj-WNhBDmPdy0g {'project_path': 'ydb/core/kqp/workload_service/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/load_test/ut, name=unittest] (uid=rnd-nbt7dw4pl1suv18h): Infrastructure error - contact devtools@ for details. Suite build deps: [gm-Irv9cVUpEvn81_t5dDw {'project_path': 'ydb/core/load_test/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/memory_controller/ut, name=unittest] (uid=rnd-4boz6y2dmnwyhumr): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [v7itusf_XPOVYg5o79X_WA {'project_path': 'ydb/core/memory_controller/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/mind/bscontroller/ut, name=unittest] (uid=rnd-d6t0sfgjcefxbboi): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [zL4sG_duvRRQcK32Fw6Lpg {'project_path': 'ydb/core/mind/bscontroller/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/mind/bscontroller/ut_bscontroller, name=unittest] (uid=rnd-5n5ugu4tod30u9co): Infrastructure error - contact devtools@ for details. Suite build deps: [tBuitbCwk8UPAwxsaueQ6g {'project_path': 'ydb/core/mind/bscontroller/ut_bscontroller', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/mind/hive/ut, name=unittest] (uid=rnd-hu22pqs4ui4wajr7): Infrastructure error - contact devtools@ for details. Suite build deps: [NLRTO0rsKVRDiXnoRvyV3g {'project_path': 'ydb/core/mind/hive/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/mind/ut, name=unittest] (uid=rnd-2joomn18lopw46tz): Infrastructure error - contact devtools@ for details. Suite build deps: [WKBzuMCGKD4s0m-K7PJ0Sw {'project_path': 'ydb/core/mind/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/mind/ut_fat, name=unittest] (uid=rnd-lgmuf4vmkqw6uklx): Infrastructure error - contact devtools@ for details. 
Suite build deps: [X3EBP3Zyuv2ssdQ4C5k5qQ {'project_path': 'ydb/core/mind/ut_fat', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/persqueue/dread_cache_service/ut, name=unittest] (uid=rnd-s7yuvlgw9zfasc2y): Infrastructure error - contact devtools@ for details. Suite build deps: [6p7i9VjtxCMwd2wYsGfecA {'project_path': 'ydb/core/persqueue/dread_cache_service/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/persqueue/ut, name=unittest] (uid=rnd-jhfd99dklcylelgg): Infrastructure error - contact devtools@ for details. Suite build deps: [sLr5bBZ5XkrmGdnqGvWRJQ {'project_path': 'ydb/core/persqueue/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/persqueue/ut/slow, name=unittest] (uid=rnd-4l0tm7c4awidzqb2): Infrastructure error - contact devtools@ for details. Suite build deps: [Yv0igL7ImTNTs_IhNoidpg {'project_path': 'ydb/core/persqueue/ut/slow', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/persqueue/ut/ut_with_sdk, name=unittest] (uid=rnd-b2zdmk6qd5xo4eiu): Infrastructure error - contact devtools@ for details. Suite build deps: [MZjLa2SOihiT4unx6n0Q6Q {'project_path': 'ydb/core/persqueue/ut/ut_with_sdk', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/public_http/ut, name=unittest] (uid=rnd-aakzhwygfr9dnkz1): Infrastructure error - contact devtools@ for details. Suite build deps: [TSobaGMjJTH3HsIEXg9Lyg {'project_path': 'ydb/core/public_http/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/quoter/ut, name=unittest] (uid=rnd-fdf233z3q0pkl3ed): Infrastructure error - contact devtools@ for details. Suite build deps: [7WshYQdC3ZU4B7p-0ITn6Q {'project_path': 'ydb/core/quoter/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/scheme/ut, name=unittest] (uid=rnd-bd2h2vav8tg4vecq): Infrastructure error - contact devtools@ for details. 
Suite build deps: [SJE2ius14ptdmdzChVhbIA {'project_path': 'ydb/core/scheme/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/scheme/ut_pg, name=unittest] (uid=rnd-75biwc0xo1gu4tgz): Infrastructure error - contact devtools@ for details. Suite build deps: [9xAf20OQ0uDrKpMb8utTTQ {'project_path': 'ydb/core/scheme/ut_pg', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/security/certificate_check/ut, name=unittest] (uid=rnd-e4buvkii7ufp4r9x): Infrastructure error - contact devtools@ for details. Suite build deps: [sVcC2ZgcXnq9I_CGrZxDLg {'project_path': 'ydb/core/security/certificate_check/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/security/ldap_auth_provider/ut, name=unittest] (uid=rnd-lv5jka2zvjevvv6o): Infrastructure error - contact devtools@ for details. Suite build deps: [Hv1JZlqjDc041b9Fds31Ew {'project_path': 'ydb/core/security/ldap_auth_provider/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/security/ut, name=unittest] (uid=rnd-o17dfhbacix17cjp): Infrastructure error - contact devtools@ for details. Suite build deps: [6GkoBxvo7P1kraXEWRIdZg {'project_path': 'ydb/core/security/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/statistics/aggregator/ut, name=unittest] (uid=rnd-9x8wp4gchjv0ng6t): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [v-5D_M66ptDAlbLmHresTQ {'project_path': 'ydb/core/statistics/aggregator/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/statistics/database/ut, name=unittest] (uid=rnd-kq88kykrtvep5fe3): Infrastructure error - contact devtools@ for details. Suite build deps: [sFtxXbeKIxK8K6257P2hQA {'project_path': 'ydb/core/statistics/database/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/statistics/service/ut, name=unittest] (uid=rnd-orbfjn1n8ofoz5ko): Infrastructure error - contact devtools@ for details. 
Suite build deps: [IWdxmJZCdGkDkr_ZI-xCvA {'project_path': 'ydb/core/statistics/service/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/sys_view/partition_stats/ut, name=unittest] (uid=rnd-8sklg6xtc7g0bsxc): Infrastructure error - contact devtools@ for details. Suite build deps: [P9zBNXjdHM5-1DBW-sNRqw {'project_path': 'ydb/core/sys_view/partition_stats/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/sys_view/query_stats/ut, name=unittest] (uid=rnd-oi0gws3myqv0h0sp): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [tgcAXgrEHKZDXXwPskrVNA {'project_path': 'ydb/core/sys_view/query_stats/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/sys_view/service/ut, name=unittest] (uid=rnd-1kyuoe71d84i8h8z): Infrastructure error - contact devtools@ for details. Suite build deps: [MqfkTDqqrA3U2rOW8e0jKQ {'project_path': 'ydb/core/sys_view/service/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/sys_view/ut, name=unittest] (uid=rnd-bx32tn7t2cjfvq81): Infrastructure error - contact devtools@ for details. Suite build deps: [mEjkNLbaoT14oBk5AxsPZQ {'project_path': 'ydb/core/sys_view/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tablet/ut, name=unittest] (uid=rnd-mppx89hezdfdw06w): Infrastructure error - contact devtools@ for details. Suite build deps: [buJ5bNh17VNNRqZGU3thJQ {'project_path': 'ydb/core/tablet/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tablet_flat/ut, name=unittest] (uid=rnd-srensftv4x34m2yx): Infrastructure error - contact devtools@ for details. Suite build deps: [3HZqCWhVO25ALJI2dV6b7A {'project_path': 'ydb/core/tablet_flat/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tablet_flat/ut_pg, name=unittest] (uid=rnd-8sfpslnqzl32ontx): Infrastructure error - contact devtools@ for details. 
Suite build deps: [W22r3jZ-as-Nk6p3bPAEkg {'project_path': 'ydb/core/tablet_flat/ut_pg', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/testlib/actors/ut, name=unittest] (uid=rnd-kr85l2v83ruogh72): Infrastructure error - contact devtools@ for details. Suite build deps: [G8WjuvLEtvAL1glQA091Kg {'project_path': 'ydb/core/testlib/actors/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/transfer/ut/functional, name=unittest] (uid=rnd-63zrkf4v2hddazj5): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [rdw5rWWFdFKg-tsmeSbxTg {'project_path': 'ydb/core/transfer/ut/functional', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/columnshard/engines/ut, name=unittest] (uid=rnd-695s7ngpeozpjcbe): Infrastructure error - contact devtools@ for details. Suite build deps: [ON4qEKgqIJRWRtbh_LVJfA {'project_path': 'ydb/core/tx/columnshard/engines/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/columnshard/splitter/ut, name=unittest] (uid=rnd-nzvzssqlmcrmxkdy): Infrastructure error - contact devtools@ for details. Suite build deps: [N3ride-DYMAavU9Q4Lp6pg {'project_path': 'ydb/core/tx/columnshard/splitter/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/columnshard/ut_rw, name=unittest] (uid=rnd-y3vcirtlb3hnu047): Infrastructure error - contact devtools@ for details. Suite build deps: [par7jHdUJ4hUTxhS3Im9Cg {'project_path': 'ydb/core/tx/columnshard/ut_rw', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/columnshard/ut_schema, name=unittest] (uid=rnd-hdog5bcl9o8nfgf7): Infrastructure error - contact devtools@ for details. 
Suite build deps: [qS4SjflbJzmN22yuGIaXTw {'project_path': 'ydb/core/tx/columnshard/ut_schema', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/conveyor_composite/ut, name=unittest] (uid=rnd-1dlaoge9swhne4rw): Infrastructure error - contact devtools@ for details. Suite build deps: [Ca3NbTFnqmkqjS-cbRHqiA {'project_path': 'ydb/core/tx/conveyor_composite/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/coordinator/ut, name=unittest] (uid=rnd-mftew17n40r9k530): Infrastructure error - contact devtools@ for details. Suite build deps: [PBfFcLbihSC7Z3drvv3Obw {'project_path': 'ydb/core/tx/coordinator/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/build_index/ut, name=unittest] (uid=rnd-ogt7s0gn1vagolhu): Infrastructure error - contact devtools@ for details. Suite build deps: [lJM5AKHTt6uQN1HDbh2SWg {'project_path': 'ydb/core/tx/datashard/build_index/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_background_compaction, name=unittest] (uid=rnd-lw3esm8xl0hpj28z): Infrastructure error - contact devtools@ for details. Suite build deps: [Ed7NXtR-NEnge9-ZR_5ZXA {'project_path': 'ydb/core/tx/datashard/ut_background_compaction', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_change_collector, name=unittest] (uid=rnd-34gbxuvhjoq03tif): Infrastructure error - contact devtools@ for details. Suite build deps: [TC0KRhm9ch8l0cmMqA0jQg {'project_path': 'ydb/core/tx/datashard/ut_change_collector', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_change_exchange, name=unittest] (uid=rnd-fummsem0rw3qt50g): Infrastructure error - contact devtools@ for details. Suite build deps: [lruW9tkVjtkcmASdB4eopw {'project_path': 'ydb/core/tx/datashard/ut_change_exchange', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_column_stats, name=unittest] (uid=rnd-srv7kvq32a7ofbv9): Infrastructure error - contact devtools@ for details. 
Suite build deps: [f2kzpwIwaXTLyTjfmFD04g {'project_path': 'ydb/core/tx/datashard/ut_column_stats', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_compaction, name=unittest] (uid=rnd-xf9qzkvskcldjfxv): Infrastructure error - contact devtools@ for details. Suite build deps: [IhluGo4Y7Srsf2l5LN4QTw {'project_path': 'ydb/core/tx/datashard/ut_compaction', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_data_cleanup, name=unittest] (uid=rnd-31vn9lzynp9hd9zu): Infrastructure error - contact devtools@ for details. Suite build deps: [DWeL4NHvnJmUjvmz6lQUzA {'project_path': 'ydb/core/tx/datashard/ut_data_cleanup', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_followers, name=unittest] (uid=rnd-8f5vph762hg4j552): Infrastructure error - contact devtools@ for details. Suite build deps: [50yQF4WOUsczRAPK0r81Ag {'project_path': 'ydb/core/tx/datashard/ut_followers', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_incremental_backup, name=unittest] (uid=rnd-jx819g55dfgswoan): Infrastructure error - contact devtools@ for details. Suite build deps: [j4DGOX_I5injMt6-8-GFrg {'project_path': 'ydb/core/tx/datashard/ut_incremental_backup', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_incremental_restore_scan, name=unittest] (uid=rnd-6it4ebt3u69frz8w): Infrastructure error - contact devtools@ for details. Suite build deps: [5uo23LUreec_Bo8E2rh9zA {'project_path': 'ydb/core/tx/datashard/ut_incremental_restore_scan', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_init, name=unittest] (uid=rnd-i45b9shfl63nxg83): Infrastructure error - contact devtools@ for details. Suite build deps: [bgY1N9GzExYWhF6LbyTJOA {'project_path': 'ydb/core/tx/datashard/ut_init', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_keys, name=unittest] (uid=rnd-6m1emj1w2s8myq2x): Infrastructure error - contact devtools@ for details. 
Suite build deps: [nwTOQkuyDzzCQe0qE0JWsw {'project_path': 'ydb/core/tx/datashard/ut_keys', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_kqp, name=unittest] (uid=rnd-hazks5deiebetdw8): Infrastructure error - contact devtools@ for details. Suite build deps: [Sy750bn8ZLzrzTtlYJkedQ {'project_path': 'ydb/core/tx/datashard/ut_kqp', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_kqp_errors, name=unittest] (uid=rnd-5itt2nw013gwgptf): Infrastructure error - contact devtools@ for details. Suite build deps: [LlhikqM7ZRcLpCzPuIy7sA {'project_path': 'ydb/core/tx/datashard/ut_kqp_errors', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_kqp_scan, name=unittest] (uid=rnd-avnskf6l3vdvh9uh): Infrastructure error - contact devtools@ for details. Suite build deps: [XDj0o_xYyMjtCEB0JXfKbg {'project_path': 'ydb/core/tx/datashard/ut_kqp_scan', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_locks, name=unittest] (uid=rnd-egh22x90c8m99918): Infrastructure error - contact devtools@ for details. Suite build deps: [519upamC0JWX4ounNEdz0g {'project_path': 'ydb/core/tx/datashard/ut_locks', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_minikql, name=unittest] (uid=rnd-ubr5tfx99ouctmoe): Infrastructure error - contact devtools@ for details. Suite build deps: [A1RR70SJbOh7a0q9LiP86Q {'project_path': 'ydb/core/tx/datashard/ut_minikql', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_minstep, name=unittest] (uid=rnd-sxr3e3gh1i9nwr6o): Infrastructure error - contact devtools@ for details. Suite build deps: [cxdHfFklzLCKeMCmY34Dow {'project_path': 'ydb/core/tx/datashard/ut_minstep', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_order, name=unittest] (uid=rnd-obtzucufka3jq3co): Infrastructure error - contact devtools@ for details. 
Suite build deps: [ec9J9B3ZX2pINXsofrbLsQ {'project_path': 'ydb/core/tx/datashard/ut_order', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_range_ops, name=unittest] (uid=rnd-tf2suya8xp7pd9fi): Infrastructure error - contact devtools@ for details. Suite build deps: [RQtHc1kM3LrPP3p1DpUdhQ {'project_path': 'ydb/core/tx/datashard/ut_range_ops', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_read_iterator, name=unittest] (uid=rnd-7n0ygzmpjs5qei7t): Infrastructure error - contact devtools@ for details. Suite build deps: [Ndqvt7YtvJoeIqBMbug9ig {'project_path': 'ydb/core/tx/datashard/ut_read_iterator', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_read_table, name=unittest] (uid=rnd-9tgfmdce11ylspcg): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [veHo4cKHGh7eLddjVtjEfQ {'project_path': 'ydb/core/tx/datashard/ut_read_table', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/datashard/ut_reassign, name=unittest] (uid=rnd-hegeha5cctslgcsj): Infrastructure error - contact devtools@ for details. Suite build deps: [JKBUEPdJcJeT7KWSTHoiDw {'project_path': 'ydb/core/tx/datashard/ut_reassign', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_replication, name=unittest] (uid=rnd-40bs5ab4lb7rzdlt): Infrastructure error - contact devtools@ for details. Suite build deps: [7dinraTGwxANCM1bJZQIJQ {'project_path': 'ydb/core/tx/datashard/ut_replication', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_rs, name=unittest] (uid=rnd-lum124i5nytiztex): Infrastructure error - contact devtools@ for details. Suite build deps: [ivhP2n_nr3e9ulp17Mxlxw {'project_path': 'ydb/core/tx/datashard/ut_rs', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_sequence, name=unittest] (uid=rnd-0o36qi5o1nybbu5y): Infrastructure error - contact devtools@ for details. 
Suite build deps: [ImJUf9ojLilH6j1iwTvmvQ {'project_path': 'ydb/core/tx/datashard/ut_sequence', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_snapshot, name=unittest] (uid=rnd-hgjp0xmfjk26wyrx): Infrastructure error - contact devtools@ for details. Suite build deps: [NBF8NsSdwnnuSAtwzZISuw {'project_path': 'ydb/core/tx/datashard/ut_snapshot', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_stats, name=unittest] (uid=rnd-qg22u8nlhstpe64u): Infrastructure error - contact devtools@ for details. Suite build deps: [EgqnKrK7j4xCBtafhDOKOg {'project_path': 'ydb/core/tx/datashard/ut_stats', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_trace, name=unittest] (uid=rnd-laj68mjm2u4tbpgj): Infrastructure error - contact devtools@ for details. Suite build deps: [1F-1ZfUGX1cZv0fC4ZppZg {'project_path': 'ydb/core/tx/datashard/ut_trace', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_upload_rows, name=unittest] (uid=rnd-qsy0wp2i0apfjtgt): Infrastructure error - contact devtools@ for details. Suite build deps: [P8ATl7cyii4QYvL92oWjrg {'project_path': 'ydb/core/tx/datashard/ut_upload_rows', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_volatile, name=unittest] (uid=rnd-p4ik47t3qtcu91vs): Infrastructure error - contact devtools@ for details. Suite build deps: [B6gb-ZW92oDAxyqqvwoueg {'project_path': 'ydb/core/tx/datashard/ut_volatile', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/locks/ut_range_treap, name=unittest] (uid=rnd-w43v5h8osaaflco0): Infrastructure error - contact devtools@ for details. Suite build deps: [SN5PyQ6kxdIcqG6SKiYTJQ {'project_path': 'ydb/core/tx/locks/ut_range_treap', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/long_tx_service/ut, name=unittest] (uid=rnd-jpznfrh4wervpvnr): Infrastructure error - contact devtools@ for details. 
Suite build deps: [EvOmqMCCqjDjoQBXbB-Prw {'project_path': 'ydb/core/tx/long_tx_service/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/mediator/ut, name=unittest] (uid=rnd-hvp9lhlrgvpeozrf): Infrastructure error - contact devtools@ for details. Suite build deps: [JEDo5VmT0xCh8Orn1mMTeQ {'project_path': 'ydb/core/tx/mediator/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/replication/controller/ut_assign_tx_id, name=unittest] (uid=rnd-ybb4uwxrckt651nx): Infrastructure error - contact devtools@ for details. Suite build deps: [9fXG8TUUL10SxTIelIP9-g {'project_path': 'ydb/core/tx/replication/controller/ut_assign_tx_id', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/replication/controller/ut_dst_creator, name=unittest] (uid=rnd-ws060reljnou6a0z): Infrastructure error - contact devtools@ for details. Suite build deps: [36N5z1cLijEfUggFDJzQOQ {'project_path': 'ydb/core/tx/replication/controller/ut_dst_creator', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/replication/controller/ut_stream_creator, name=unittest] (uid=rnd-zp1qyyfzorsriuli): Infrastructure error - contact devtools@ for details. Suite build deps: [khIMP9i_PhgOyvtGr3Th4Q {'project_path': 'ydb/core/tx/replication/controller/ut_stream_creator', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/replication/controller/ut_target_discoverer, name=unittest] (uid=rnd-099djr3a0q5qnsrr): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [wWcoagIAUlSADTlOrDUL0g {'project_path': 'ydb/core/tx/replication/controller/ut_target_discoverer', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/replication/service/ut_topic_reader, name=unittest] (uid=rnd-aiug78hq43folxpl): Infrastructure error - contact devtools@ for details. Suite build deps: [tYkuucOsGZ7ucSywgW5ktA {'project_path': 'ydb/core/tx/replication/service/ut_topic_reader', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/replication/service/ut_worker, name=unittest] (uid=rnd-5ggp9livjmkjoeo7): Infrastructure error - contact devtools@ for details. 
Suite build deps: [tY2audAvTndyD6d6KGMruw {'project_path': 'ydb/core/tx/replication/service/ut_worker', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/replication/ydb_proxy/ut, name=unittest] (uid=rnd-rcw8o3izk3bwh0gr): Infrastructure error - contact devtools@ for details. Suite build deps: [XcMIKyGO2OBuD5sbqtiomQ {'project_path': 'ydb/core/tx/replication/ydb_proxy/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/scheme_board/ut_monitoring, name=unittest] (uid=rnd-j1j4fxp272t9l0pe): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [wiIrGU40aOrnVhLMl-pS_w {'project_path': 'ydb/core/tx/scheme_board/ut_monitoring', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/scheme_board/ut_populator, name=unittest] (uid=rnd-v9b4syk5lpnvx7hh): Infrastructure error - contact devtools@ for details. Suite build deps: [oDizRx8iwcYXbM6XP8ayzA {'project_path': 'ydb/core/tx/scheme_board/ut_populator', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/scheme_board/ut_replica, name=unittest] (uid=rnd-y7lwq9r79d7x9to9): Infrastructure error - contact devtools@ for details. Suite build deps: [J50PjVu03eHTShfkopU6oA {'project_path': 'ydb/core/tx/scheme_board/ut_replica', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/scheme_board/ut_subscriber, name=unittest] (uid=rnd-e2z1xsd4e2jzn5q1): Infrastructure error - contact devtools@ for details. Suite build deps: [ltT7N7yJ1-aKCE6In2Iazg {'project_path': 'ydb/core/tx/scheme_board/ut_subscriber', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_auditsettings, name=unittest] (uid=rnd-40j2k8lhglb8vzdm): Infrastructure error - contact devtools@ for details. Suite build deps: [GE3na-0fKQIlPVDAJVGMkg {'project_path': 'ydb/core/tx/schemeshard/ut_auditsettings', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_background_cleaning, name=unittest] (uid=rnd-9lq74rvwfh0wk5fh): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3Eakc7LDropS8CoCLaMKSQ {'project_path': 'ydb/core/tx/schemeshard/ut_background_cleaning', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_backup, name=unittest] (uid=rnd-pu0uyt0fe80rr0p0): Infrastructure error - contact devtools@ for details. Suite build deps: [0ys0tEjpEjY_hSv2LOJ9Kg {'project_path': 'ydb/core/tx/schemeshard/ut_backup', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [t0xDYv0EVYq8grba50G1eA {'project_path': 'ydb/tests/supp', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_backup_collection, name=unittest] (uid=rnd-htxmevw1ic0ly2jj): Infrastructure error - contact devtools@ for details. Suite build deps: [AvtQhdmdYlgtZGQZAVzT3w {'project_path': 'ydb/core/tx/schemeshard/ut_backup_collection', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_backup_collection_reboots, name=unittest] (uid=rnd-uhvm62lqr81pwgli): Infrastructure error - contact devtools@ for details. Suite build deps: [kzavjkXh-MFkjSpAPaVh3Q {'project_path': 'ydb/core/tx/schemeshard/ut_backup_collection_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_base_reboots, name=unittest] (uid=rnd-lvbqo5xbu6b5oyz5): Infrastructure error - contact devtools@ for details. Suite build deps: [SWZfRP638apOusNQUNXsZA {'project_path': 'ydb/core/tx/schemeshard/ut_base_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_bsvolume, name=unittest] (uid=rnd-5k22pbxbm4d5jbin): Infrastructure error - contact devtools@ for details. Suite build deps: [hUA0jVe9T93yFODoAHhgTQ {'project_path': 'ydb/core/tx/schemeshard/ut_bsvolume', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_bsvolume_reboots, name=unittest] (uid=rnd-l32nd7mahxkjdxl9): Infrastructure error - contact devtools@ for details. 
Suite build deps: [OEptIvlE4AMzW3ME4rcJlg {'project_path': 'ydb/core/tx/schemeshard/ut_bsvolume_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_cdc_stream_reboots, name=unittest] (uid=rnd-sxrm270isk17paph): Infrastructure error - contact devtools@ for details. Suite build deps: [4mclijsbV24fn21BA5ig_Q {'project_path': 'ydb/core/tx/schemeshard/ut_cdc_stream_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_column_build, name=unittest] (uid=rnd-sx2vqq8dx9atmg88): Infrastructure error - contact devtools@ for details. Suite build deps: [jzFa8oOo-Y63jr-fWwHcig {'project_path': 'ydb/core/tx/schemeshard/ut_column_build', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_compaction, name=unittest] (uid=rnd-7nza00wb3bmh29bm): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [xZ4R-SuzXFIE8cuS3ZHhvw {'project_path': 'ydb/core/tx/schemeshard/ut_compaction', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/schemeshard/ut_continuous_backup, name=unittest] (uid=rnd-j5hytdx19wfi3ncf): Infrastructure error - contact devtools@ for details. Suite build deps: [JGY4ppX8h4GQxWeLOq6NHA {'project_path': 'ydb/core/tx/schemeshard/ut_continuous_backup', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_data_erasure, name=unittest] (uid=rnd-ldwuqibzs8vax37d): Infrastructure error - contact devtools@ for details. Suite build deps: [Szhbx92txwpgke0y3Cr74w {'project_path': 'ydb/core/tx/schemeshard/ut_data_erasure', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_export, name=unittest] (uid=rnd-1cludzkr6jbj3141): Infrastructure error - contact devtools@ for details. 
Suite build deps: [t0xDYv0EVYq8grba50G1eA {'project_path': 'ydb/tests/supp', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [zeB8321f8FND7U1nPLlocg {'project_path': 'ydb/core/tx/schemeshard/ut_export', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/schemeshard/ut_export_reboots_s3, name=unittest] (uid=rnd-nu55ey5pe52js60p): Infrastructure error - contact devtools@ for details. Suite build deps: [PfP4kRynpCXVBy3AxgYm2Q {'project_path': 'ydb/core/tx/schemeshard/ut_export_reboots_s3', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [t0xDYv0EVYq8grba50G1eA {'project_path': 'ydb/tests/supp', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_external_data_source, name=unittest] (uid=rnd-mh286hy5dirvxi5y): Infrastructure error - contact devtools@ for details. Suite build deps: [JCnTz_wWSoHX8QM7gaUMAQ {'project_path': 'ydb/core/tx/schemeshard/ut_external_data_source', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_external_data_source_reboots, name=unittest] (uid=rnd-mekns88lakhfti9q): Infrastructure error - contact devtools@ for details. Suite build deps: [GsLmeqBKPBYyaqlyn2yjKg {'project_path': 'ydb/core/tx/schemeshard/ut_external_data_source_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_extsubdomain_reboots, name=unittest] (uid=rnd-sjr8rr8rvufpb11d): Infrastructure error - contact devtools@ for details. Suite build deps: [5yMSufNBHN0EeYF1PW3tCA {'project_path': 'ydb/core/tx/schemeshard/ut_extsubdomain_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_filestore_reboots, name=unittest] (uid=rnd-ohcuvt9p77rqiyq1): Infrastructure error - contact devtools@ for details. Suite build deps: [4dZRzw23GQVfk4TxJfopTQ {'project_path': 'ydb/core/tx/schemeshard/ut_filestore_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_index, name=unittest] (uid=rnd-xrq02qejuu0g6l4i): Infrastructure error - contact devtools@ for details. 
Suite build deps: [-rPfpTI_cH4zwmci8HkU1Q {'project_path': 'ydb/core/tx/schemeshard/ut_index', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_index_build, name=unittest] (uid=rnd-qx1hwuu34nme6nvj): Infrastructure error - contact devtools@ for details. Suite build deps: [oTMK5IgibclWDngtc9e-sw {'project_path': 'ydb/core/tx/schemeshard/ut_index_build', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_index_build_reboots, name=unittest] (uid=rnd-0e23prs62kcct3zl): Infrastructure error - contact devtools@ for details. Suite build deps: [gg8rKprErnVW5MAVsYnEPA {'project_path': 'ydb/core/tx/schemeshard/ut_index_build_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_login, name=unittest] (uid=rnd-1x3l0gxh4m5x2fag): Infrastructure error - contact devtools@ for details. Suite build deps: [NRCeknv-3NWhnA0kIyQnGQ {'project_path': 'ydb/core/tx/schemeshard/ut_login', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_olap, name=unittest] (uid=rnd-dt37h2ds221rvnx0): Infrastructure error - contact devtools@ for details. Suite build deps: [NhLfhubTZwPp8bhfrcsUNw {'project_path': 'ydb/core/tx/schemeshard/ut_olap', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_olap_reboots, name=unittest] (uid=rnd-syc38l9cbs0zzcsa): Infrastructure error - contact devtools@ for details. Suite build deps: [EWQJuIGxXj1iTl9IX_Zu4Q {'project_path': 'ydb/core/tx/schemeshard/ut_olap_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_reboots, name=unittest] (uid=rnd-uureorzonjbrxry7): Infrastructure error - contact devtools@ for details. Suite build deps: [XndnUI2CeZtJfdSH-WTRoA {'project_path': 'ydb/core/tx/schemeshard/ut_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_replication_reboots, name=unittest] (uid=rnd-wf68vwkllr3tu4ae): Infrastructure error - contact devtools@ for details. 
Suite build deps: [oJxrsnJwfQMhvWFRB_Uu_w {'project_path': 'ydb/core/tx/schemeshard/ut_replication_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_restore, name=unittest] (uid=rnd-vd890798n8vdfuar): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [zNKcMcb370I9vTxeOwL4Vw {'project_path': 'ydb/core/tx/schemeshard/ut_restore', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/schemeshard/ut_rtmr_reboots, name=unittest] (uid=rnd-sv1erbg1oapctci9): Infrastructure error - contact devtools@ for details. Suite build deps: [Rnfs9OlphDORXVoneM9SZw {'project_path': 'ydb/core/tx/schemeshard/ut_rtmr_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_ru_calculator, name=unittest] (uid=rnd-hbwcu7pvc25tftwg): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [xQ6D19y9SFyKQjQBXhJ4Og {'project_path': 'ydb/core/tx/schemeshard/ut_ru_calculator', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/schemeshard/ut_sequence_reboots, name=unittest] (uid=rnd-evbria6ahj8dik7k): Infrastructure error - contact devtools@ for details. Suite build deps: [SA9-FQUmIFGJJt2nPruaTw {'project_path': 'ydb/core/tx/schemeshard/ut_sequence_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_serverless, name=unittest] (uid=rnd-sqw4ouyffg2atlv4): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [z50Wv4Z0hw8LWnZvUpXdXQ {'project_path': 'ydb/core/tx/schemeshard/ut_serverless', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/schemeshard/ut_serverless_reboots, name=unittest] (uid=rnd-th8398et79tz35p6): Infrastructure error - contact devtools@ for details. Suite build deps: [j7UT-xTEk1toC7jTk9NnyA {'project_path': 'ydb/core/tx/schemeshard/ut_serverless_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_split_merge, name=unittest] (uid=rnd-1ndgzg3rjukzxuws): Infrastructure error - contact devtools@ for details. 
Suite build deps: [pWpOOgzr7vNh0C2pqhCN9g {'project_path': 'ydb/core/tx/schemeshard/ut_split_merge', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_stats, name=unittest] (uid=rnd-og4ymun3f701rqgi): Infrastructure error - contact devtools@ for details. Suite build deps: [UjfbYPhvupZZXKnogTtLMQ {'project_path': 'ydb/core/tx/schemeshard/ut_stats', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_subdomain, name=unittest] (uid=rnd-w7xg7bbe12z6awj5): Infrastructure error - contact devtools@ for details. Suite build deps: [63sGusIRsBQjwW_QYNYxSg {'project_path': 'ydb/core/tx/schemeshard/ut_subdomain', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_subdomain_reboots, name=unittest] (uid=rnd-qwpihyp97sp3ym1q): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [u6zC-QIIb6t8lO7jX-KlSA {'project_path': 'ydb/core/tx/schemeshard/ut_subdomain_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/schemeshard/ut_transfer, name=unittest] (uid=rnd-e5pnu0kpjpwlu3ev): Infrastructure error - contact devtools@ for details. Suite build deps: [BFfnGzkWuTUqaIcwxcH_-g {'project_path': 'ydb/core/tx/schemeshard/ut_transfer', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_ttl, name=unittest] (uid=rnd-v8yxpii7b4fysg9i): Infrastructure error - contact devtools@ for details. Suite build deps: [f8_mqTfljKrvVR8ODe1_tA {'project_path': 'ydb/core/tx/schemeshard/ut_ttl', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_user_attributes, name=unittest] (uid=rnd-a888zimu6q4ry2jw): Infrastructure error - contact devtools@ for details. Suite build deps: [e_1BL7iFcTBOM1YC_wi6jA {'project_path': 'ydb/core/tx/schemeshard/ut_user_attributes', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_user_attributes_reboots, name=unittest] (uid=rnd-0auw8c8jscinpbpo): Infrastructure error - contact devtools@ for details. 
Suite build deps: [-RVPJdQnZHVG3sKm1wTD3g {'project_path': 'ydb/core/tx/schemeshard/ut_user_attributes_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_vector_index_build_reboots, name=unittest] (uid=rnd-9ucm1s678kyd49h1): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vmP5hs82bgSg38rKRU4Ecg {'project_path': 'ydb/core/tx/schemeshard/ut_vector_index_build_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/sequenceproxy/ut, name=unittest] (uid=rnd-xc307u2tpt3jvxgy): Infrastructure error - contact devtools@ for details. Suite build deps: [VXbVZkE8vzCDiSp5bLPEQA {'project_path': 'ydb/core/tx/sequenceproxy/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/sequenceshard/ut, name=unittest] (uid=rnd-8lczexwqd79ofn1z): Infrastructure error - contact devtools@ for details. Suite build deps: [OxuZAQI5n2iljYgURntxWg {'project_path': 'ydb/core/tx/sequenceshard/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/sharding/ut, name=unittest] (uid=rnd-a0ujj67gypaupke2): Infrastructure error - contact devtools@ for details. Suite build deps: [GHjrklGcEP8dOejwMGAM5A {'project_path': 'ydb/core/tx/sharding/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/tiering/ut, name=unittest] (uid=rnd-2vx5kd3rtm1uoor6): Infrastructure error - contact devtools@ for details. Suite build deps: [NJ3iv-y6EnkgPm3_lpwlkA {'project_path': 'ydb/core/tx/tiering/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/time_cast/ut, name=unittest] (uid=rnd-0tuj2oskdrspvokj): Infrastructure error - contact devtools@ for details. Suite build deps: [McDPw-B6zjlGlKm3uuTyaw {'project_path': 'ydb/core/tx/time_cast/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/tx_allocator/ut, name=unittest] (uid=rnd-zoev3r1oop53kwzd): Infrastructure error - contact devtools@ for details. 
Suite build deps: [dPbnuMfYwVqVGra2l7XMZQ {'project_path': 'ydb/core/tx/tx_allocator/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/tx_allocator_client/ut, name=unittest] (uid=rnd-6ul0ujcf6597wb0q): Infrastructure error - contact devtools@ for details. Suite build deps: [MYhfEGsg5gouIRpenNMdbA {'project_path': 'ydb/core/tx/tx_allocator_client/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/tx_proxy/ut_base_tenant, name=unittest] (uid=rnd-87yux90univz2w0q): Infrastructure error - contact devtools@ for details. Suite build deps: [qKfJWVW3nQ24vyyAGLRCSA {'project_path': 'ydb/core/tx/tx_proxy/ut_base_tenant', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/tx_proxy/ut_encrypted_storage, name=unittest] (uid=rnd-2ztarj7405k213c3): Infrastructure error - contact devtools@ for details. Suite build deps: [JiqpXH62y_WsSLjLfsssSQ {'project_path': 'ydb/core/tx/tx_proxy/ut_encrypted_storage', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/tx_proxy/ut_ext_tenant, name=unittest] (uid=rnd-f1tnn5nxp6d9s259): Infrastructure error - contact devtools@ for details. Suite build deps: [9SJjOu_RsH4Zn9eolhULlQ {'project_path': 'ydb/core/tx/tx_proxy/ut_ext_tenant', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/tx_proxy/ut_schemereq, name=unittest] (uid=rnd-na8006llcfr4cr0l): Infrastructure error - contact devtools@ for details. Suite build deps: [FBmaJZcTAEPgDPTCG9Lrvg {'project_path': 'ydb/core/tx/tx_proxy/ut_schemereq', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/tx_proxy/ut_storage_tenant, name=unittest] (uid=rnd-okry7lny9vibssem): Infrastructure error - contact devtools@ for details. Suite build deps: [rz4FgeLqO8fIs8a5-nC5VA {'project_path': 'ydb/core/tx/tx_proxy/ut_storage_tenant', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/util/ut, name=unittest] (uid=rnd-20gxnut2quzlfrk6): Infrastructure error - contact devtools@ for details. 
Suite build deps: [P1vzN8NEfkB2Bq-4zNhaXg {'project_path': 'ydb/core/util/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/ymq/actor/ut, name=unittest] (uid=rnd-zwhlzynh6bakjl9j): Infrastructure error - contact devtools@ for details. Suite build deps: [UyWieU-1nC3erjvV98QpTQ {'project_path': 'ydb/core/ymq/actor/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/ymq/actor/yc_search_ut, name=unittest] (uid=rnd-bvyz0gq9cpfj95rt): Infrastructure error - contact devtools@ for details. Suite build deps: [QaMtdRhs0Sziq3KM1cISTg {'project_path': 'ydb/core/ymq/actor/yc_search_ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/ymq/base/ut, name=unittest] (uid=rnd-p2dghhg5j55vtwgj): Infrastructure error - contact devtools@ for details. Suite build deps: [kFt49FXjpRkBmIfDX0gqxQ {'project_path': 'ydb/core/ymq/base/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/ymq/http/ut, name=unittest] (uid=rnd-7u39vq3ny3x4f89o): Infrastructure error - contact devtools@ for details. Suite build deps: [4PVUS965Tp7y_5ZTxzR3YA {'project_path': 'ydb/core/ymq/http/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/ymq/ut, name=unittest] (uid=rnd-setdcf6hs7r5nv0h): Infrastructure error - contact devtools@ for details. Suite build deps: [Z5DdgjeHGeYd0vstQaui8g {'project_path': 'ydb/core/ymq/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/actors/testlib/ut, name=unittest] (uid=rnd-8ch90qyew5dbm87d): Infrastructure error - contact devtools@ for details. Suite build deps: [aCRyOlL-MSdrRe0gXL4u6Q {'project_path': 'ydb/library/actors/testlib/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/ncloud/impl/ut, name=unittest] (uid=rnd-24q2d1bjpitw9ge3): Infrastructure error - contact devtools@ for details. 
Suite build deps: [qgXNEjkGo2HOM0JdkH_pCQ {'project_path': 'ydb/library/ncloud/impl/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/persqueue/topic_parser/ut, name=unittest] (uid=rnd-l8et8m22k7b32qis): Infrastructure error - contact devtools@ for details. Suite build deps: [S6c6lP3QWNRN1QZGCFL4ig {'project_path': 'ydb/library/persqueue/topic_parser/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/query_actor/ut, name=unittest] (uid=rnd-uo51hgfzpee65msw): Infrastructure error - contact devtools@ for details. Suite build deps: [AaQnu9EuYJsOy_oOcUJ3WQ {'project_path': 'ydb/library/query_actor/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/dq/runtime/ut, name=unittest] (uid=rnd-dzd220kb90ru5zky): Infrastructure error - contact devtools@ for details. Suite build deps: [Bk4cC-AQDuya-rWwNGIzNg {'project_path': 'ydb/library/yql/dq/runtime/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/providers/generic/actors/ut, name=unittest] (uid=rnd-zcaqwzt1pw2il4dy): Infrastructure error - contact devtools@ for details. Suite build deps: [pWT3HdnD4yHriNCf5NTrcA {'project_path': 'ydb/library/yql/providers/generic/actors/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/providers/solomon/actors/ut, name=unittest] (uid=rnd-nxnwgcubgm4b7jot): Infrastructure error - contact devtools@ for details. Suite build deps: [D63QH9kun45aOk-RnCe5jQ {'project_path': 'ydb/library/yql/providers/solomon/actors/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [afbAP8BqVD2NZxbfhk0uJQ {'project_path': 'ydb/library/yql/tools/solomon_emulator/recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [jWrmtcwzfDvZhQhFDy429w {'project_path': 'ydb/library/yql/tools/solomon_emulator/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/public/lib/ydb_cli/topic/ut, name=unittest] (uid=rnd-01pt012su1rl2ben): Infrastructure error - contact devtools@ for details. 
Suite build deps: [ZAgnxQHt0PuJ03jhrYtcVg {'project_path': 'ydb/public/lib/ydb_cli/topic/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/public/sdk/cpp/src/client/federated_topic/ut, name=unittest] (uid=rnd-1fvdiwv79t196zqs): Infrastructure error - contact devtools@ for details. Suite build deps: [eOSLsMbxYu5a3-J_HHRzsg {'project_path': 'ydb/public/sdk/cpp/src/client/federated_topic/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/public/sdk/cpp/src/client/persqueue_public/ut, name=unittest] (uid=rnd-ugbttstp2tdjrcae): Infrastructure error - contact devtools@ for details. Suite build deps: [ZSiCdByYvq5Th8ph8AnP-A {'project_path': 'ydb/public/sdk/cpp/src/client/persqueue_public/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut, name=unittest] (uid=rnd-477fa0ptaztpxo0c): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yM0a8IXO30dXcEabBviJKg {'project_path': 'ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/public/sdk/cpp/src/client/topic/ut, name=unittest] (uid=rnd-9cv10cmxue8sktfc): Infrastructure error - contact devtools@ for details. Suite build deps: [08cwdfUF3kFwjZxDtP9p-w {'project_path': 'ydb/public/sdk/cpp/src/client/topic/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/services/cms/ut, name=unittest] (uid=rnd-c7k98939uty2ub4k): Infrastructure error - contact devtools@ for details. Suite build deps: [ZQ1ZZp4vufNrU-ZWHmVHuw {'project_path': 'ydb/services/cms/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/services/config/ut, name=unittest] (uid=rnd-pkia3l0eijjxgnu5): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [zwBQVwes4uEpGo1raUPKQw {'project_path': 'ydb/services/config/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/services/datastreams/ut, name=unittest] (uid=rnd-eppy0tpqajydvpa8): Infrastructure error - contact devtools@ for details. 
Suite build deps: [XD-EWQA6rJWof-NuiWxAig {'project_path': 'ydb/services/datastreams/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/services/dynamic_config/ut, name=unittest] (uid=rnd-puwic6ina40pm3xm): Infrastructure error - contact devtools@ for details. Suite build deps: [VEcNCqVK8BMUwI665sxRXQ {'project_path': 'ydb/services/dynamic_config/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/services/ext_index/ut, name=unittest] (uid=rnd-9fxflagfz8cxuvuw): Infrastructure error - contact devtools@ for details. Suite build deps: [j2ckFwW--OFNSmyvzjlDvA {'project_path': 'ydb/services/ext_index/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/services/fq/ut_integration, name=unittest] (uid=rnd-i7qe94dw3szwbug0): Infrastructure error - contact devtools@ for details. Suite build deps: [KEkQj5TTvL7v6XKWnuRaSw {'project_path': 'ydb/services/fq/ut_integration', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/services/keyvalue/ut, name=unittest] (uid=rnd-74k7dfbzfhw2erz4): Infrastructure error - contact devtools@ for details. Suite build deps: [ZP4SCJEfSJ-ZBQZ9ApQiKQ {'project_path': 'ydb/services/keyvalue/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/services/metadata/secret/ut, name=unittest] (uid=rnd-97aytdwa1f8vhsbw): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [xXLWmAvaYQ2Ks3quFre-Xg {'project_path': 'ydb/services/metadata/secret/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/services/persqueue_cluster_discovery/ut, name=unittest] (uid=rnd-m8fxxq6uk4jgkxst): Infrastructure error - contact devtools@ for details. Suite build deps: [q6KCN12AScvsCakB4TzYIA {'project_path': 'ydb/services/persqueue_cluster_discovery/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/services/persqueue_v1/ut, name=unittest] (uid=rnd-mpd3yw4rmz360fp8): Infrastructure error - contact devtools@ for details. 
Suite build deps: [eYQllrdolYztQbIZVGcxlw {'project_path': 'ydb/services/persqueue_v1/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/services/rate_limiter/ut, name=unittest] (uid=rnd-rqrzgmr5p9h5bgna): Infrastructure error - contact devtools@ for details. Suite build deps: [KMt1c0Ov4yE8eo3Nzkc8KA {'project_path': 'ydb/services/rate_limiter/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/services/ydb/sdk_sessions_pool_ut, name=unittest] (uid=rnd-9p3pzf83icmbl3cn): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [w_Bb3qwIt964VG8dnKn8qQ {'project_path': 'ydb/services/ydb/sdk_sessions_pool_ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/services/ydb/sdk_sessions_ut, name=unittest] (uid=rnd-87pc6mdyd84t8kq4): Infrastructure error - contact devtools@ for details. Suite build deps: [VBOtjlTn-GFJuiMxgpYFOg {'project_path': 'ydb/services/ydb/sdk_sessions_ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/services/ydb/table_split_ut, name=unittest] (uid=rnd-6vhrakkh79h0nxw1): Infrastructure error - contact devtools@ for details. Suite build deps: [dq72y74lBIFUL7wjYveIJg {'project_path': 'ydb/services/ydb/table_split_ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/services/ydb/ut, name=unittest] (uid=rnd-ro6yweei469pdip9): Infrastructure error - contact devtools@ for details. Suite build deps: [nmQ8_29ho_-p4UsKoNSzvQ {'project_path': 'ydb/services/ydb/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/control_plane_storage, name=unittest] (uid=rnd-8pomcflgzmmfbl3l): Infrastructure error - contact devtools@ for details. 
Suite build deps: [-XaYuhY7RxKBSx1l4RWsgg {'project_path': 'ydb/tests/fq/control_plane_storage', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/pq_async_io/ut, name=unittest] (uid=rnd-i3xc9z9m9j6se6fd): Infrastructure error - contact devtools@ for details. Suite build deps: [5nGc59DZ85okwuCWyBkpiQ {'project_path': 'ydb/tests/fq/pq_async_io/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/backup, name=unittest] (uid=rnd-mhv38fut4bqdq1ab): Infrastructure error - contact devtools@ for details. Suite build deps: [-SsLDoMlrPe028IXd7n06w {'project_path': 'ydb/tests/tools/s3_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bN7W2WVMYCpBrFLEGLNGxg {'project_path': 'ydb/tests/functional/backup', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yxNWThklOGHiAPqLldgcvQ {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/functional/backup/s3_path_style, name=unittest] (uid=rnd-t6odmuzm5hh4thow): Infrastructure error - contact devtools@ for details. 
Suite build deps: [-SsLDoMlrPe028IXd7n06w {'project_path': 'ydb/tests/tools/s3_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [Y5b83d3WjzfTqw7l7fRKRg {'project_path': 'ydb/tests/functional/backup/s3_path_style', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yxNWThklOGHiAPqLldgcvQ {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/functional/kqp/kqp_indexes, name=unittest] (uid=rnd-wsm3hu509qwhxc8f): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [_bp4Z1PK-crRkq9qIbFrTA {'project_path': 'ydb/tests/functional/kqp/kqp_indexes', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/kqp/kqp_query_session, name=unittest] (uid=rnd-de4ih0282getbu71): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ds7GTAYkisroQnMrU3yjcg {'project_path': 'ydb/tests/functional/kqp/kqp_query_session', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/kqp/kqp_query_svc, name=unittest] (uid=rnd-fl2gb5s5xn9p4et1): Infrastructure error - contact devtools@ for details. 
Suite build deps: [0S_vq78qwiwZu77-vrePzw {'project_path': 'ydb/tests/functional/kqp/kqp_query_svc', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/replication, name=unittest] (uid=rnd-n6afb2t2r6fp009s): Infrastructure error - contact devtools@ for details. Suite build deps: [2WsX5QEvKs50Pv6ufzFAgA {'project_path': 'ydb/tests/functional/replication', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sdk/cpp/sdk_credprovider, name=unittest] (uid=rnd-darux0mmc1gpl39q): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [U5s94rOjx9N8XJe89PMEWA {'project_path': 'ydb/tests/functional/sdk/cpp/sdk_credprovider', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/gateway/ut, name=gtest] (uid=rnd-vzlrner32vlb3kyc): Infrastructure error - contact devtools@ for details. Suite build deps: [nWSR8mxp_2yQiztLrU2KXQ {'project_path': 'ydb/core/kqp/gateway/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/public/sdk/cpp/tests/integration/basic_example, name=gtest] (uid=rnd-01mn297t0c2k0ihj): Infrastructure error - contact devtools@ for details. 
Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VDdpln3gW84-8k24B_DNgQ {'project_path': 'ydb/public/sdk/cpp/tests/integration/basic_example', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/public/sdk/cpp/tests/integration/bulk_upsert, name=gtest] (uid=rnd-wj7pbudg3iah64v3): Infrastructure error - contact devtools@ for details. Suite build deps: [0Qyf4WuPiqygD1i6FLD2tw {'project_path': 'ydb/public/sdk/cpp/tests/integration/bulk_upsert', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/public/sdk/cpp/tests/integration/server_restart, name=gtest] (uid=rnd-1iqfdwcrbtwvotgz): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [dGumwYSFHM7acnKMsXToRw {'project_path': 'ydb/public/sdk/cpp/tests/integration/server_restart', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/public/tools/local_ydb, name=import_test] (uid=rnd-pw2tcs2dmzr4lgor): Infrastructure error - contact devtools@ for details. Suite build deps: [FzzzGBdMUOxR_flajqmjGA {'project_path': 'ydb/public/tools/local_ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [mhCyxXngbTw02ZAy20xMBQ {'project_path': 'ydb/public/tools/local_ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/s3, name=import_test] (uid=rnd-c9mlc55w4mivmf62): Infrastructure error - contact devtools@ for details. 
Suite build deps: [Zd4996ODGBwuhs5K1R1y1g {'project_path': 'ydb/tests/fq/s3', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/cms, name=import_test] (uid=rnd-6dow2gbaootlrxaw): Infrastructure error - contact devtools@ for details. Suite build deps: [2nTzn1ldkfh9muS7HZuObw {'project_path': 'ydb/tests/functional/cms', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/tpc/large, name=import_test] (uid=rnd-px2usx3n9uomb97q): Infrastructure error - contact devtools@ for details. Suite build deps: [K0GJ4wT0KNSLi7gJ4ZthgQ {'project_path': 'ydb/tests/functional/tpc/large', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/ydb_cli, name=import_test] (uid=rnd-mfd36twrveus12sg): Infrastructure error - contact devtools@ for details. Suite build deps: [OuZpGgEj7jnCgkIGQwDYqw {'project_path': 'ydb/tests/functional/ydb_cli', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/olap/load, name=import_test] (uid=rnd-5pmbm9hkl6kvjy6x): Infrastructure error - contact devtools@ for details. Suite build deps: [BgYFRD4Accd4XFxT9AvZ7w {'project_path': 'ydb/tests/olap/load', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/tools/nemesis/driver, name=import_test] (uid=rnd-frl98aqk8iwua5og): Infrastructure error - contact devtools@ for details. Suite build deps: [58reXgwqti57mG1-gdItsg {'project_path': 'ydb/tests/tools/nemesis/driver', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [8Yiuvrmr7LP75gAz6Lpszg {'project_path': 'ydb/tests/tools/nemesis/driver', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tools/cfg/bin, name=import_test] (uid=rnd-yar2raiosbrt19jp): Infrastructure error - contact devtools@ for details. 
Suite build deps: [ZAzd7gghKmq9jrtEIgmWWg {'project_path': 'ydb/tools/cfg/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [_WyPEQJEqUOqLiix9ZDbug {'project_path': 'ydb/tools/cfg/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/viewer/tests, name=py3test] (uid=rnd-j43j8w1k8ojasqta): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [M-OGqqNBx631GzOHbV6tXg {'project_path': 'ydb/core/viewer/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/example, name=py3test] (uid=rnd-974yhu34ur1kiifm): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bf1789YZ5Bn1q4Z9Fo0ouQ {'project_path': 'ydb/tests/example', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/common, name=py3test] (uid=rnd-llqjose9qgomrbex): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [IJNiR4ipPHFvpDXrW9eTeg {'project_path': 'ydb/tests/fq/common', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/http_api, name=py3test] (uid=rnd-f63yzndb81fgp8q8): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [sTN3d-q0JV4IpRJzqwUmMg {'project_path': 'ydb/tests/fq/http_api', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/mem_alloc, name=py3test] (uid=rnd-9mg4iunvc3furr0x): Infrastructure error - contact devtools@ for details. 
Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bMd40EeiJf4Ux4GI2Ycnjg {'project_path': 'ydb/tests/fq/mem_alloc', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [cyXkpl-lM0MVtLRxqtRUwQ {'project_path': 'ydb/tests/tools/pq_read', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/multi_plane, name=py3test] (uid=rnd-dadjzwtjqgjm8uco): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [cyXkpl-lM0MVtLRxqtRUwQ {'project_path': 'ydb/tests/tools/pq_read', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [xnT5c5swn4gL-P7LaGfIlQ {'project_path': 'ydb/tests/fq/multi_plane', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/fq/plans, name=py3test] (uid=rnd-j0jhz209q1s5k1mi): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [wtoWF6_ldp-CrGCdip-XOg {'project_path': 'ydb/tests/fq/plans', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yxNWThklOGHiAPqLldgcvQ {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/fq/restarts, name=py3test] (uid=rnd-oh1wm0jrbya0uo3t): Infrastructure error - contact devtools@ for details. 
Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [dEZKzNcj_nZbUS3zA-XTPw {'project_path': 'ydb/tests/fq/restarts', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yxNWThklOGHiAPqLldgcvQ {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/fq/s3, name=py3test] (uid=rnd-3jz2cyg2pitkcqf7): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [Zd4996ODGBwuhs5K1R1y1g {'project_path': 'ydb/tests/fq/s3', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [cyXkpl-lM0MVtLRxqtRUwQ {'project_path': 'ydb/tests/tools/pq_read', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yxNWThklOGHiAPqLldgcvQ {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/fq/solomon, name=py3test] (uid=rnd-7otzkbrzu6lf1hpk): Infrastructure error - contact devtools@ for details. Suite build deps: [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WiKGu27fC3ule5-gXFfxyg {'project_path': 'ydb/library/yql/tools/solomon_emulator_grpc', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [pxdo6wQmKt-W6HAf_9_d3Q {'project_path': 'ydb/tests/fq/solomon', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yds, name=py3test] (uid=rnd-l6tcfx3iejuyjb6n): Infrastructure error - contact devtools@ for details. 
Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [EuZvAY-l-8HKyxeY34RlQw {'project_path': 'ydb/tests/fq/yds', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [cyXkpl-lM0MVtLRxqtRUwQ {'project_path': 'ydb/tests/tools/pq_read', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part0, name=py3test] (uid=rnd-yvqz9wi2zw9vqy12): Infrastructure error - contact devtools@ for details. Suite build deps: [9_in8NM0C8IIn_zdDNZOoA {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part0', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part1, name=py3test] (uid=rnd-42v9ckxphz6j1ykb): Infrastructure error - contact devtools@ for details. Suite build deps: [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [joCy2DrL0ucVtmj6M88UNQ {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part1', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part10, name=py3test] (uid=rnd-wromtaq6jwsk29ah): Infrastructure error - contact devtools@ for details. 
Suite build deps: [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [sN_8ak4iCADn6ueF6r4Hgg {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part10', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part11, name=py3test] (uid=rnd-v36i5j26xh58mx86): Infrastructure error - contact devtools@ for details. Suite build deps: [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [eL1Va3mnYnRN3mGczA_hLw {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part11', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part12, name=py3test] (uid=rnd-ldlshwk75tknrafe): Infrastructure error - contact devtools@ for details. Suite build deps: [IpyRdZy7Ko-UvnQxgW5AGA {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part12', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part13, name=py3test] (uid=rnd-gab773z4uc9pdfzb): Infrastructure error - contact devtools@ for details. 
Suite build deps: [Fa61RteTQSw8BmhPCfIl3w {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part13', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part14, name=py3test] (uid=rnd-wstkd247jnoa1ues): Infrastructure error - contact devtools@ for details. Suite build deps: [Bb8ZHo1QX9D46rynKT12Qw {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part14', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part15, name=py3test] (uid=rnd-5tbnkbl6jkpz3byd): Infrastructure error - contact devtools@ for details. Suite build deps: [TRGvXq1jcxmxrPzpM4jThQ {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part15', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part16, name=py3test] (uid=rnd-5334toeunsy2fo63): Infrastructure error - contact devtools@ for details. 
Suite build deps: [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ngPQ92q7hz1mK82oNG0UiQ {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part16', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part17, name=py3test] (uid=rnd-oyj7gd8ghuurplgq): Infrastructure error - contact devtools@ for details. Suite build deps: [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [jXk9FHXODGEBoumPECjpig {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part17', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part18, name=py3test] (uid=rnd-k0wid5w83as51fk1): Infrastructure error - contact devtools@ for details. Suite build deps: [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [nwsURidSUBMKZ6OQ6J4i5w {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part18', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part19, name=py3test] (uid=rnd-pehnused1zyxtrj0): Infrastructure error - contact devtools@ for details. 
Suite build deps: [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lmhRPPU_qdlgpR5JjV3K3w {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part19', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part2, name=py3test] (uid=rnd-yavnjp4916tny5k1): Infrastructure error - contact devtools@ for details. Suite build deps: [5avbQuRLI0__EjEImT5Izg {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part2', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part3, name=py3test] (uid=rnd-sp3cvix263swxaws): Infrastructure error - contact devtools@ for details. Suite build deps: [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [_USDmtQAF-8N0DSk744bYA {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part3', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part4, name=py3test] (uid=rnd-i8hnctcxs1h9zafp): Infrastructure error - contact devtools@ for details. 
Suite build deps: [4AHmjqchxvKooTbryhDnNg {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part4', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part5, name=py3test] (uid=rnd-js344863lw9ldavz): Infrastructure error - contact devtools@ for details. Suite build deps: [SotyYHEcqgTzw9cRRieBGg {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part5', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part6, name=py3test] (uid=rnd-eavew2jmwb1alkv0): Infrastructure error - contact devtools@ for details. Suite build deps: [Bkv2jahr8Gve-12SRSFBNA {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part6', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part7, name=py3test] (uid=rnd-vhgng9pl2ir8vgzf): Infrastructure error - contact devtools@ for details. 
Suite build deps: [49ZalB0z2EDhInKzir_Ggg {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part7', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part8, name=py3test] (uid=rnd-a1cyvlpd4h7sarg4): Infrastructure error - contact devtools@ for details. Suite build deps: [VKE60dFALzg8FtUgT2FXkg {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part8', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/fq/yt/kqp_yt_file/part9, name=py3test] (uid=rnd-6gfrd8ll57qv2avn): Infrastructure error - contact devtools@ for details. Suite build deps: [SQjTQmbE5eH0VVIojShahA {'project_path': 'ydb/tests/fq/yt/kqp_yt_file/part9', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/tests/functional/api, name=py3test] (uid=rnd-r64rzx3utv9xidob): Infrastructure error - contact devtools@ for details. 
Suite build deps: [8rJdyosdsZEwlugb8ib-fw {'project_path': 'ydb/tests/functional/api', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/audit, name=py3test] (uid=rnd-b7bmbhvmmbnjnc0d): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [Htg8xB6RJfiHlSQ271G_0Q {'project_path': 'ydb/tests/functional/audit', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/autoconfig, name=py3test] (uid=rnd-3btv2hg8u74zio75): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [uyFvGYwW_fYW6s-LhmdgOg {'project_path': 'ydb/tests/functional/autoconfig', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/functional/blobstorage, name=py3test] (uid=rnd-zx91dn3a9iqkt0ds): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [abLLt3AkDqgYYTi0LTeEOw {'project_path': 'ydb/tests/functional/blobstorage', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/canonical, name=py3test] (uid=rnd-hi9iwqcr1rvg9d18): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [VB2dfHt9ye3kVrpHQkV-Aw {'project_path': 'ydb/tests/functional/canonical', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/clickbench, name=py3test] (uid=rnd-ods73oocbtcs47a1): Infrastructure error - contact devtools@ for details. 
Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [LvTXiZIbhpuMh0gJimK0vw {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [z4RERXY_VwWBkDMOWgtgOw {'project_path': 'ydb/tests/functional/clickbench', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/functional/cms, name=py3test] (uid=rnd-itvf9nk1cmphrd9m): Infrastructure error - contact devtools@ for details. Suite build deps: [2nTzn1ldkfh9muS7HZuObw {'project_path': 'ydb/tests/functional/cms', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/config, name=py3test] (uid=rnd-fxfs2tkv0kamk68a): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [gzRqw1qqNSHhekgPbvLXSw {'project_path': 'ydb/tests/functional/config', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/encryption, name=py3test] (uid=rnd-lkp6wsr75ljd2zav): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [jvR007kkGzrimi7jmN-IDQ {'project_path': 'ydb/tests/functional/encryption', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/hive, name=py3test] (uid=rnd-btgldc62vs0fooqf): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [T4Sz8jcKT9R9sNjx3L0pOg {'project_path': 'ydb/tests/functional/hive', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/limits, name=py3test] (uid=rnd-qq679dlsjz3r9fg4): Infrastructure error - contact devtools@ for details. 
Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [HmcNyggn-Iwjvm8EgX00KA {'project_path': 'ydb/tests/functional/limits', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/minidumps, name=py3test] (uid=rnd-oacl4csxoaqm4ouy): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [gseJwhWBmRoxLcbkfOS_2A {'project_path': 'ydb/tests/functional/minidumps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/postgresql, name=py3test] (uid=rnd-3a3v7p22i2q6fm6h): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [Z39cdQH9E_vPJyIH2W5Szg {'project_path': 'ydb/tests/functional/postgresql', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [qMN0W7hm_mR5alJWAYWexA {'project_path': 'ydb/tests/functional/postgresql/psql', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/query_cache, name=py3test] (uid=rnd-j0ihwacsaq5nnknv): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [Ft3nCnA2tIVq4ZoTgpm_8g {'project_path': 'ydb/tests/functional/query_cache', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/rename, name=py3test] (uid=rnd-y6btrfmoon1ac2os): Infrastructure error - contact devtools@ for details. Suite build deps: [9hXHKoeAzz6mU1FLmf3gPA {'project_path': 'ydb/tests/functional/rename', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/restarts, name=py3test] (uid=rnd-mof6mbykkjebjlgp): Infrastructure error - contact devtools@ for details. 
Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BxknTsFkUZ5Dr-QccbeB_g {'project_path': 'ydb/tests/functional/restarts', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/scheme_shard, name=py3test] (uid=rnd-hd6dfxzgt6nn3fvm): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [wUSeFpaSI_AwGR5d9cmHug {'project_path': 'ydb/tests/functional/scheme_shard', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/functional/scheme_tests, name=py3test] (uid=rnd-5cbwvsjey3pw4g2f): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [k7sjN5u_QrKtUDxlV16CyA {'project_path': 'ydb/tests/functional/scheme_tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/script_execution, name=py3test] (uid=rnd-2biy6treweg8es68): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [cgjTRdkrZ_4d7BRJ0eJnig {'project_path': 'ydb/tests/functional/script_execution', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/serializable, name=py3test] (uid=rnd-ysig7rvd2z84e4xq): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [GkHyQNo5oKNB7jVcO3QwCg {'project_path': 'ydb/tests/functional/serializable', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/serverless, name=py3test] (uid=rnd-m88pk9cg58jwbhs0): Infrastructure error - contact devtools@ for details. 
Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [lZiVbNttqYAg8FRbEMdofA {'project_path': 'ydb/tests/functional/serverless', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sqs/cloud, name=py3test] (uid=rnd-qvtg82251aa127an): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [jvOkQA9X_2xMtbCxc2DEIA {'project_path': 'ydb/tests/functional/sqs/cloud', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sqs/common, name=py3test] (uid=rnd-pa47b67m69vvx36f): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [JhgeeDe0vMIfPxdCMs0pnQ {'project_path': 'ydb/tests/functional/sqs/common', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sqs/large, name=py3test] (uid=rnd-lxjmf8vvxwbo5k2m): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [LrJOODP0_b-tm3QwqNBhhQ {'project_path': 'ydb/tests/functional/sqs/large', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sqs/merge_split_common_table/fifo, name=py3test] (uid=rnd-5pybi7nwjm0q8b8t): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [T_WmT_UFGgVQ4nKbQJqFcQ {'project_path': 'ydb/tests/functional/sqs/merge_split_common_table/fifo', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sqs/merge_split_common_table/std, name=py3test] (uid=rnd-63lyfanr89tosa69): Infrastructure error - contact devtools@ for details. 
Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [Yncn6JscsW36xAZ9rB4U3Q {'project_path': 'ydb/tests/functional/sqs/merge_split_common_table/std', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sqs/messaging, name=py3test] (uid=rnd-u5dzgnb4u4k750ri): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [PTLwQVzmaYU1HKzWNIrkbg {'project_path': 'ydb/tests/functional/sqs/messaging', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sqs/multinode, name=py3test] (uid=rnd-fnsgeulgog22vksj): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [YKojzFSQQFch1ipu_je4gg {'project_path': 'ydb/tests/functional/sqs/multinode', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/sqs/with_quotas, name=py3test] (uid=rnd-bkwsbxfxkopmunjh): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [kebFv3kZRpf3nS6N5YlGow {'project_path': 'ydb/tests/functional/sqs/with_quotas', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/tenants, name=py3test] (uid=rnd-74xhvsqbru8adzvi): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [RIadDrCh0I16yrGqcrdrMA {'project_path': 'ydb/tests/functional/tenants', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/tpc/medium, name=py3test] (uid=rnd-1h96d4be7q01m0ca): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3NiieuagZAv2XidnTZGWtA {'project_path': 'ydb/tests/functional/tpc/medium', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [LvTXiZIbhpuMh0gJimK0vw {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/ttl, name=py3test] (uid=rnd-54ku8hvsv2tvj55b): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [mQznU_JUfbVnTr0x7pqQJw {'project_path': 'ydb/tests/functional/ttl', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/wardens, name=py3test] (uid=rnd-biaghif9z4d40rg8): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [i0M1jluoKuwMp7bFzax8HA {'project_path': 'ydb/tests/functional/wardens', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/functional/ydb_cli, name=py3test] (uid=rnd-sh4ryb6jt8qwjagk): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [LvTXiZIbhpuMh0gJimK0vw {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [OuZpGgEj7jnCgkIGQwDYqw {'project_path': 'ydb/tests/functional/ydb_cli', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/library/ut, name=py3test] (uid=rnd-o1j9kl2txxn15xcm): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [E79JQHP1kTkHXwC-essinw {'project_path': 'ydb/tests/library/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/olap, name=py3test] (uid=rnd-feh1txcw6svdjqd8): Infrastructure error - contact devtools@ for details. 
Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [JIP_tcgnLjoMby5_zelnWg {'project_path': 'ydb/tests/olap', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [LvTXiZIbhpuMh0gJimK0vw {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/olap/scenario, name=py3test] (uid=rnd-p7awj5zhrsfr2w5u): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [wR3fDyzrd1PP7enFLtlyRQ {'project_path': 'ydb/tests/olap/scenario', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/olap/ttl_tiering, name=py3test] (uid=rnd-e2l53yavhix5w1aa): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [khozwlRAI-Ug_vRqgHg45g {'project_path': 'ydb/tests/olap/ttl_tiering', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yxNWThklOGHiAPqLldgcvQ {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/sql, name=py3test] (uid=rnd-fen4xt8yp018kqaz): Infrastructure error - contact devtools@ for details. Suite build deps: [16fDwbQZWZCGXLvPd7Lxsw {'project_path': 'ydb/tests/sql', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [LvTXiZIbhpuMh0gJimK0vw {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/kv/tests, name=py3test] (uid=rnd-bhv59sc25wijhj0v): Infrastructure error - contact devtools@ for details. 
Suite build deps: [BL5zslKUCHeCwdIttX-FsA {'project_path': 'ydb/tests/stress/kv/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [LvTXiZIbhpuMh0gJimK0vw {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/log/tests, name=py3test] (uid=rnd-uh7i9p54hgyfn56c): Infrastructure error - contact devtools@ for details. Suite build deps: [1ANIX2QRHr0yNhdjlGtmAg {'project_path': 'ydb/tests/stress/log/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [LvTXiZIbhpuMh0gJimK0vw {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/olap_workload/tests, name=py3test] (uid=rnd-08latk87jfjbp2xl): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [kB-aQv6tay8RJyK7h4nyTw {'project_path': 'ydb/tests/stress/olap_workload/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/oltp_workload/tests, name=py3test] (uid=rnd-5dgo7boay7yaf290): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fx4HsD3j_bqIBD15FcPfNA {'project_path': 'ydb/tests/stress/oltp_workload/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/simple_queue/tests, name=py3test] (uid=rnd-mwuwcgmmv5ifj1c4): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [EbOS91ryZ5LR5qz-_cPj-g {'project_path': 'ydb/tests/stress/simple_queue/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/stress/transfer/tests, name=py3test] (uid=rnd-rimw1r1sazjgez2m): Infrastructure error - contact devtools@ for details. 
Suite build deps: [AIDIv6Yn994TwYyhMytsIg {'project_path': 'ydb/tests/stress/transfer', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [LvTXiZIbhpuMh0gJimK0vw {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [wVaQDKg1-xMsLuwqzQYU6Q {'project_path': 'ydb/tests/stress/transfer/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/tests/tools/kqprun/tests, name=py3test] (uid=rnd-igm0y9naod1oc9vx): Infrastructure error - contact devtools@ for details. Suite build deps: [Hs6--qc0PmD1eVf6W887Ng {'project_path': 'ydb/tests/tools/kqprun/recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WQjInlufSYnoPXoSCAljNw {'project_path': 'ydb/tests/tools/kqprun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [Z9YUszgy9LOHFT6s1RgJYw {'project_path': 'ydb/tests/tools/kqprun/tests', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/tools/nemesis/ut, name=py3test] (uid=rnd-astxv1c0p8hnu8d9): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [Gyb1KY2jXptkKN3kcGUOnw {'project_path': 'ydb/tests/tools/nemesis/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/tests/tools/pq_read/test, name=py3test] (uid=rnd-y9cfdnuj0y7m1ltm): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [aKmCNyvhFGCLB_FqSLeqiA {'project_path': 'ydb/tests/tools/pq_read/test', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [cyXkpl-lM0MVtLRxqtRUwQ {'project_path': 'ydb/tests/tools/pq_read', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part0, name=pytest] (uid=rnd-2ddsln7pcwt425db): Infrastructure error - contact devtools@ for details. 
Suite build deps: [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [uyXg1XZGIQbQIwPEpXBtUA {'project_path': 'ydb/library/yql/tests/sql/dq_file/part0', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part1, name=pytest] (uid=rnd-c6jomkna9cs8rb43): Infrastructure error - contact devtools@ for details. Suite build deps: [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [tqQb7Bf1JhhXPL0Wqo6lVQ {'project_path': 'ydb/library/yql/tests/sql/dq_file/part1', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part10, name=pytest] (uid=rnd-e6cm33lxn4sahs3g): Infrastructure error - contact devtools@ for details. 
Suite build deps: [Wdy0Z0DpKWdwwgR5L3_Urw {'project_path': 'ydb/library/yql/tests/sql/dq_file/part10', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part11, name=pytest] (uid=rnd-pf10eeknn07x45rw): Infrastructure error - contact devtools@ for details. Suite build deps: [0IluDTu_dg_ti3frsDcfNA {'project_path': 'ydb/library/yql/tests/sql/dq_file/part11', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part12, name=pytest] (uid=rnd-9c8jehdbtvvv2uiy): Infrastructure error - contact devtools@ for details. 
Suite build deps: [4xI4-DTQloI_XoPv6KD3Ug {'project_path': 'ydb/library/yql/tests/sql/dq_file/part12', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part13, name=pytest] (uid=rnd-hpi1bvslkhptfj6x): Infrastructure error - contact devtools@ for details. Suite build deps: [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [wyFduD84wQfYkq9ZIOzomA {'project_path': 'ydb/library/yql/tests/sql/dq_file/part13', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part14, name=pytest] (uid=rnd-w5ydpoaluj1hu56z): Infrastructure error - contact devtools@ for details. 
Suite build deps: [Y6giBwe8oDL9vtYXvBa2_g {'project_path': 'ydb/library/yql/tests/sql/dq_file/part14', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part15, name=pytest] (uid=rnd-18dtq4jwbijt16bf): Infrastructure error - contact devtools@ for details. Suite build deps: [Cc-9VW4pYo5gsVW5B2GAOA {'project_path': 'ydb/library/yql/tests/sql/dq_file/part15', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part16, name=pytest] (uid=rnd-nq3mozj1ld7zw5xj): Infrastructure error - contact devtools@ for details. 
Suite build deps: [XbC8QBM1VdNwfOb-2VE0bA {'project_path': 'ydb/library/yql/tests/sql/dq_file/part16', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part17, name=pytest] (uid=rnd-4f31cqkufngs44zi): Infrastructure error - contact devtools@ for details. Suite build deps: [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [g2T-TnqFgDJQsyzEGWIJFw {'project_path': 'ydb/library/yql/tests/sql/dq_file/part17', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part18, name=pytest] (uid=rnd-vcuj5sn0vlxszshk): Infrastructure error - contact devtools@ for details. 
Suite build deps: [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [oN9p2GA_dT7QtsgFX-c2ag {'project_path': 'ydb/library/yql/tests/sql/dq_file/part18', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part19, name=pytest] (uid=rnd-hyewupw0qtgf06aj): Infrastructure error - contact devtools@ for details. Suite build deps: [JeJR1fKrW_4qTyuEf0TfdQ {'project_path': 'ydb/library/yql/tests/sql/dq_file/part19', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part2, name=pytest] (uid=rnd-jich0m3qguissvoc): Infrastructure error - contact devtools@ for details. 
Suite build deps: [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [hr6pltADlp7T1vRJnuxD1g {'project_path': 'ydb/library/yql/tests/sql/dq_file/part2', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part3, name=pytest] (uid=rnd-zaoaldorivs9xzem): Infrastructure error - contact devtools@ for details. Suite build deps: [XdZ_f_cU7iXjziDkuKKLPw {'project_path': 'ydb/library/yql/tests/sql/dq_file/part3', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part4, name=pytest] (uid=rnd-3ar2kgy9cgy4hwcr): Infrastructure error - contact devtools@ for details. 
Suite build deps: [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yDLXVQijS38DOB9uQKrsnQ {'project_path': 'ydb/library/yql/tests/sql/dq_file/part4', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part5, name=pytest] (uid=rnd-zy1mwb1jwbwf1uo1): Infrastructure error - contact devtools@ for details. Suite build deps: [-GOj-NiSAuFZnzKcu9gq1g {'project_path': 'ydb/library/yql/tests/sql/dq_file/part5', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part6, name=pytest] (uid=rnd-oo0iu8l3efq61i3x): Infrastructure error - contact devtools@ for details. 
Suite build deps: [4f9PBi_ATGh2O2FlidorSw {'project_path': 'ydb/library/yql/tests/sql/dq_file/part6', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part7, name=pytest] (uid=rnd-i9u41z5nvld6yrdi): Infrastructure error - contact devtools@ for details. Suite build deps: [-121gu5qfhRwgHe7JmJVDA {'project_path': 'ydb/library/yql/tests/sql/dq_file/part7', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part8, name=pytest] (uid=rnd-yhrprxws2ufg6k14): Infrastructure error - contact devtools@ for details. 
Suite build deps: [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [pvsQcxyjHwzYajsslwneRQ {'project_path': 'ydb/library/yql/tests/sql/dq_file/part8', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/dq_file/part9, name=pytest] (uid=rnd-fhmo8lx91tulllm2): Infrastructure error - contact devtools@ for details. Suite build deps: [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [qtJZNH5J3fSCqpmr6WnWzQ {'project_path': 'ydb/library/yql/tests/sql/dq_file/part9', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part0, name=pytest] (uid=rnd-xocow37q5ukms5vw): Infrastructure error - contact devtools@ for details. 
Suite build deps: [4UNJA1nGD2xe1c1YjuAQeQ {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part0', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part1, name=pytest] (uid=rnd-ldzecxobvubws8ra): Infrastructure error - contact devtools@ for details. Suite build deps: [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [hN-rXNhRAZkmOkVtNfCL6g {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part1', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part10, name=pytest] (uid=rnd-v23a7k0zfqiijd6i): Infrastructure error - contact devtools@ for details. 
Suite build deps: [0CCikKSfg3ADkb_spahYLw {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part10', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part2, name=pytest] (uid=rnd-hz6qo5t1u2bnijnr): Infrastructure error - contact devtools@ for details. Suite build deps: [Vnvve7rjKvFhsVUzrwwycQ {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part2', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part3, name=pytest] (uid=rnd-s3rhd683w4efh6w0): Infrastructure error - contact devtools@ for details. 
Suite build deps: [Abd39QgMVw34pXuV5nqRZA {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part3', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part4, name=pytest] (uid=rnd-yq9zbjsezptg3qo7): Infrastructure error - contact devtools@ for details. Suite build deps: [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fwcUoRt9F6gqvjFGVFvlqQ {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part4', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part5, name=pytest] (uid=rnd-mvhwyav7o26waxg4): Infrastructure error - contact devtools@ for details. 
Suite build deps: [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [jLbN5VvPnP5y8BiWxj5bfg {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part5', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part6, name=pytest] (uid=rnd-g3xabh61d8h9f42v): Infrastructure error - contact devtools@ for details. Suite build deps: [WRJ4bPoaWfndxyHd8r1kyg {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part6', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part7, name=pytest] (uid=rnd-i2mz7y14fql179fm): Infrastructure error - contact devtools@ for details. 
Suite build deps: [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [mhUl4Xg5fEv3rHp50ppuWQ {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part7', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part8, name=pytest] (uid=rnd-h1vxc8j7laq3nuas): Infrastructure error - contact devtools@ for details. Suite build deps: [88VwWzu4Yu_JzhyA5YGhTw {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part8', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/library/yql/tests/sql/hybrid_file/part9, name=pytest] (uid=rnd-dic294crq7134k5j): Infrastructure error - contact devtools@ for details. 
Suite build deps: [FT66HLSOEBqPpgHScnINPw {'project_path': 'ydb/library/yql/tests/sql/hybrid_file/part9', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bz3scc5NMOTE8dAF8w9Rfw {'project_path': 'yql/essentials/tests/common/test_framework/udfs_deps', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [fcCI6Gux559S8dfdT-yQzg {'project_path': 'ydb/library/yql/tools/dqrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [riyDLBGipX5f0FVBQcno8Q {'project_path': 'yql/tools/yqlrun', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vpYT1cGix9kwHTC4rh7NDQ {'project_path': 'yql/essentials/tools/astdiff', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [yWWtdZs2xRPIXTwbDjj24Q {'project_path': 'yql/essentials/udfs/test/test_import', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/apps/ydb/ut, name=unittest] (uid=rnd-tvv9aov9vmzwgewi): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [LvTXiZIbhpuMh0gJimK0vw {'project_path': 'ydb/apps/ydb', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [mHilgpo9TeBC_7MhIJ7tzw {'project_path': 'ydb/apps/ydb/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/actorlib_impl/ut, name=unittest] (uid=rnd-e6hlkw0xi70o3hx1): Infrastructure error - contact devtools@ for details. Suite build deps: [1vgsv2-KHR-wX5oK8dGQ1w {'project_path': 'ydb/core/actorlib_impl/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/backup/impl/ut_table_writer, name=unittest] (uid=rnd-aa6wl3ljavu561un): Infrastructure error - contact devtools@ for details. Suite build deps: [HhiYFniY6UyPQHwrOLiAXA {'project_path': 'ydb/core/backup/impl/ut_table_writer', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/base/ut, name=unittest] (uid=rnd-baqxx7rexv47cmcw): Infrastructure error - contact devtools@ for details. 
Suite build deps: [5-oA5fo6hDt5AaY1_hvhVA {'project_path': 'ydb/core/base/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/backpressure/ut, name=unittest] (uid=rnd-4ues06i65kkgezsa): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [xNXz-pD6-Mtl_sTZfmFnJQ {'project_path': 'ydb/core/blobstorage/backpressure/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/blobstorage/dsproxy/ut, name=unittest] (uid=rnd-8lf9aj6q6n0c8936): Infrastructure error - contact devtools@ for details. Suite build deps: [noo-s1pCctlAldfXPRgphQ {'project_path': 'ydb/core/blobstorage/dsproxy/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/dsproxy/ut_fat, name=unittest] (uid=rnd-hz2tvwvrlhqznzor): Infrastructure error - contact devtools@ for details. Suite build deps: [l6nT9IbwsP9nheUT615PRQ {'project_path': 'ydb/core/blobstorage/dsproxy/ut_fat', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/dsproxy/ut_ftol, name=unittest] (uid=rnd-p0zam6c8271fi3cx): Infrastructure error - contact devtools@ for details. Suite build deps: [guRvh83k--S0q6PxiNkJKg {'project_path': 'ydb/core/blobstorage/dsproxy/ut_ftol', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/dsproxy/ut_strategy, name=unittest] (uid=rnd-kdvu3tej10leg60u): Infrastructure error - contact devtools@ for details. Suite build deps: [cEDJxM8LlXa4CUQY3ypDFw {'project_path': 'ydb/core/blobstorage/dsproxy/ut_strategy', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/groupinfo/ut, name=unittest] (uid=rnd-gqk2k5zyflkafjwb): Infrastructure error - contact devtools@ for details. Suite build deps: [6VzOt66IvPANbSE1DeuqyA {'project_path': 'ydb/core/blobstorage/groupinfo/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/nodewarden/ut, name=unittest] (uid=rnd-523yy2cu2ix7fv5r): Infrastructure error - contact devtools@ for details. 
Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [v1HotOQCRtPtLTlnVo_liQ {'project_path': 'ydb/core/blobstorage/nodewarden/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/blobstorage/nodewarden/ut_sequence, name=unittest] (uid=rnd-fcljx9mcqldxv247): Infrastructure error - contact devtools@ for details. Suite build deps: [tOeqJYh6ptgQ4STat_XJmw {'project_path': 'ydb/core/blobstorage/nodewarden/ut_sequence', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/pdisk/ut, name=unittest] (uid=rnd-340kvnvkkf7iraa5): Infrastructure error - contact devtools@ for details. Suite build deps: [57UB9kFfll7xo9iwxlh2VQ {'project_path': 'ydb/core/blobstorage/pdisk/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage, name=unittest] (uid=rnd-wni2bcmo12tx7c5k): Infrastructure error - contact devtools@ for details. Suite build deps: [h1_yhLOiJ41wJWGwvZXR5A {'project_path': 'ydb/core/blobstorage/ut_blobstorage', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_balancing, name=unittest] (uid=rnd-9qpmkqas3jykhzkk): Infrastructure error - contact devtools@ for details. Suite build deps: [mYXZxrxEr0XhLZmSBZHBYA {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_balancing', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_blob_depot, name=unittest] (uid=rnd-afhqng1eiefocav1): Infrastructure error - contact devtools@ for details. Suite build deps: [Ts3C4fHSFblM2KfayG8Vmw {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_blob_depot', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_donor, name=unittest] (uid=rnd-kucvasx8vnd0mfl1): Infrastructure error - contact devtools@ for details. Suite build deps: [cWazmh7lvaMV1ZFEpKL74Q {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_donor', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_huge, name=unittest] (uid=rnd-1iwb7syddcpa4dbb): Infrastructure error - contact devtools@ for details. 
Suite build deps: [qcAm6mZPduUZjC1ROmQXFA {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_huge', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk, name=unittest] (uid=rnd-cml4r0y8qgxe0kmz): Infrastructure error - contact devtools@ for details. Suite build deps: [SmeElTz9f9tG0Dumn6uxew {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk, name=unittest] (uid=rnd-hbfxkq9ss10bo5vp): Infrastructure error - contact devtools@ for details. Suite build deps: [DDS5n_Iw-36JWY-2BwfUhw {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk, name=unittest] (uid=rnd-mrguz3px36xkjm1z): Infrastructure error - contact devtools@ for details. Suite build deps: [CSbBQ11xyJA9O2xZ7p63bQ {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart, name=unittest] (uid=rnd-bjaimq2etx7ttdde): Infrastructure error - contact devtools@ for details. Suite build deps: [iMX5s0KaKMcxLgMsCb-hMA {'project_path': 'ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_testshard, name=unittest] (uid=rnd-neeva3jz0imnpbhm): Infrastructure error - contact devtools@ for details. Suite build deps: [Fz2eJXhlysL9FhRVSM1mOg {'project_path': 'ydb/core/blobstorage/ut_testshard', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_vdisk, name=unittest] (uid=rnd-cymmaol0y15rimko): Infrastructure error - contact devtools@ for details. Suite build deps: [1m1bfulxuRlbQnWSBJGFuQ {'project_path': 'ydb/core/blobstorage/ut_vdisk', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/ut_vdisk2, name=unittest] (uid=rnd-5ow4b6h6ou0l4khc): Infrastructure error - contact devtools@ for details. 
Suite build deps: [eZoihKLz4i8Oqw2AV0UC8Q {'project_path': 'ydb/core/blobstorage/ut_vdisk2', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/vdisk/anubis_osiris/ut, name=unittest] (uid=rnd-fb14r5kn0ueoq3u2): Infrastructure error - contact devtools@ for details. Suite build deps: [-Z42BwdsSSM8aIJo8PMguQ {'project_path': 'ydb/core/blobstorage/vdisk/anubis_osiris/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/vdisk/hulldb/base/ut, name=unittest] (uid=rnd-fhfj5b01txh62p6e): Infrastructure error - contact devtools@ for details. Suite build deps: [XjhLRQx0fjAxON1IjTAg3Q {'project_path': 'ydb/core/blobstorage/vdisk/hulldb/base/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/vdisk/hulldb/cache_block/ut, name=unittest] (uid=rnd-gb9xwb2xfgtrr148): Infrastructure error - contact devtools@ for details. Suite build deps: [JPAzpeYGYKgzDRr9TB6q8Q {'project_path': 'ydb/core/blobstorage/vdisk/hulldb/cache_block/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/vdisk/hullop/ut, name=unittest] (uid=rnd-l7cmorfzxiv13d6p): Infrastructure error - contact devtools@ for details. Suite build deps: [ncrHJJ_bV2uozY2KCjxcVw {'project_path': 'ydb/core/blobstorage/vdisk/hullop/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/vdisk/ingress/ut, name=unittest] (uid=rnd-tobgtu22wm22ojtl): Infrastructure error - contact devtools@ for details. Suite build deps: [0qllbksu--A0j3in-X_r5Q {'project_path': 'ydb/core/blobstorage/vdisk/ingress/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/vdisk/skeleton/ut, name=unittest] (uid=rnd-wgmrrbb21xhr5pxe): Infrastructure error - contact devtools@ for details. Suite build deps: [VVNUu-iMzbZTL5tbr0lJdg {'project_path': 'ydb/core/blobstorage/vdisk/skeleton/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/vdisk/syncer/ut, name=unittest] (uid=rnd-phxm1aw1axxmb3sa): Infrastructure error - contact devtools@ for details. 
Suite build deps: [t6VHY1z6ma_-uzMUCN9fNg {'project_path': 'ydb/core/blobstorage/vdisk/syncer/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/blobstorage/vdisk/synclog/ut, name=unittest] (uid=rnd-kpkjds918rnaszvb): Infrastructure error - contact devtools@ for details. Suite build deps: [tDYLZJdsvjjr6QiGsDT68A {'project_path': 'ydb/core/blobstorage/vdisk/synclog/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/client/minikql_compile/ut, name=unittest] (uid=rnd-c3mjv6qb9xr5p0yo): Infrastructure error - contact devtools@ for details. Suite build deps: [e-0akBM95roIUXFB46dGdA {'project_path': 'ydb/core/client/minikql_compile/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/client/server/ut, name=unittest] (uid=rnd-mb86h24z89pygnwj): Infrastructure error - contact devtools@ for details. Suite build deps: [_OWjI7UHPzioiw2p7CNEGQ {'project_path': 'ydb/core/client/server/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/client/ut, name=unittest] (uid=rnd-c9823qr9l2pmhmch): Infrastructure error - contact devtools@ for details. Suite build deps: [9N4hc78AAMN-bOwmzlB_BQ {'project_path': 'ydb/core/client/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [t0xDYv0EVYq8grba50G1eA {'project_path': 'ydb/tests/supp', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/cms/console/ut, name=unittest] (uid=rnd-rxe2u014n3odnj9t): Infrastructure error - contact devtools@ for details. Suite build deps: [g7jNN0vMWgMW4AsNPxa1Yg {'project_path': 'ydb/core/cms/console/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/cms/console/validators/ut, name=unittest] (uid=rnd-na74rtgiognhnbeh): Infrastructure error - contact devtools@ for details. Suite build deps: [1PqmQtXaCD6ZezkfcQ7g-A {'project_path': 'ydb/core/cms/console/validators/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/cms/ut, name=unittest] (uid=rnd-wdg4nyllqca13rgu): Infrastructure error - contact devtools@ for details. 
Suite build deps: [ZfALacHLE_As6rcubcGKJg {'project_path': 'ydb/core/cms/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/cms/ut_sentinel, name=unittest] (uid=rnd-g5bqpg98gm6c509k): Infrastructure error - contact devtools@ for details. Suite build deps: [MuX9aYdNdEf8n0vHSkydDQ {'project_path': 'ydb/core/cms/ut_sentinel', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/cms/ut_sentinel_unstable, name=unittest] (uid=rnd-9k2uoa81xkzhk73e): Infrastructure error - contact devtools@ for details. Suite build deps: [-cNMBXSS_JYVHMgjXO6LCg {'project_path': 'ydb/core/cms/ut_sentinel_unstable', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/control/ut, name=unittest] (uid=rnd-azuktj5du5406h7b): Infrastructure error - contact devtools@ for details. Suite build deps: [eJQ49yi7FjWFkwtkV9VeIQ {'project_path': 'ydb/core/control/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/erasure/ut, name=unittest] (uid=rnd-jz7lng855v7kxg45): Infrastructure error - contact devtools@ for details. Suite build deps: [OK8cLUSOEmStI6yaLy5ySA {'project_path': 'ydb/core/erasure/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/external_sources/s3/ut, name=unittest] (uid=rnd-nixycrq78gtwcfyu): Infrastructure error - contact devtools@ for details. Suite build deps: [5Q42tZ7PoNSpXtQn8VijNQ {'project_path': 'library/recipes/docker_compose/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [DQ5nj5QlLB92bhWG641i4A {'project_path': 'library/recipes/docker_compose', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [mLGyzs24isAiHycSns2hqA {'project_path': 'ydb/core/external_sources/s3/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/fq/libs/checkpoint_storage/ut, name=unittest] (uid=rnd-mn1o8gkf246eaolv): Infrastructure error - contact devtools@ for details. 
Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [W8refynzIGb4hpbozf-I3g {'project_path': 'ydb/core/fq/libs/checkpoint_storage/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/fq/libs/checkpointing/ut, name=unittest] (uid=rnd-52wn4w6yv5h1s5hx): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yRpKIjbewAHm94DLJ5nHwA {'project_path': 'ydb/core/fq/libs/checkpointing/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/fq/libs/common/ut, name=unittest] (uid=rnd-wo4sbcjmt0bml3l6): Infrastructure error - contact devtools@ for details. Suite build deps: [CwkxALwq0AEaMmkF4UGh8w {'project_path': 'ydb/core/fq/libs/common/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/fq/libs/control_plane_proxy/ut, name=unittest] (uid=rnd-jom1xgogrc9xjwr6): Infrastructure error - contact devtools@ for details. Suite build deps: [FucJqSmjJUY8C7_8zw8DPw {'project_path': 'ydb/core/fq/libs/control_plane_proxy/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/fq/libs/result_formatter/ut, name=unittest] (uid=rnd-5w6vve7m0w1uacwc): Infrastructure error - contact devtools@ for details. Suite build deps: [IArvb-ATN2eL9TBLLpBsbg {'project_path': 'ydb/core/fq/libs/result_formatter/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/fq/libs/row_dispatcher/format_handler/ut, name=unittest] (uid=rnd-qqym9h3c52no244f): Infrastructure error - contact devtools@ for details. Suite build deps: [jMkSgkMDUcv7QZWpnhZtLA {'project_path': 'ydb/core/fq/libs/row_dispatcher/format_handler/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/fq/libs/row_dispatcher/ut, name=unittest] (uid=rnd-dzere0b78wj4fw1s): Infrastructure error - contact devtools@ for details. 
Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [eyj9hqF4qrAbKg5UVJzKKw {'project_path': 'ydb/core/fq/libs/row_dispatcher/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/fq/libs/ydb/ut, name=unittest] (uid=rnd-m6vn1tmv2ej7ax0l): Infrastructure error - contact devtools@ for details. Suite build deps: [-Zl0PENHkEo8-uaFaQ6cgw {'project_path': 'ydb/core/fq/libs/ydb/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/graph/shard/ut, name=unittest] (uid=rnd-dwwgumxj2x8vfqyl): Infrastructure error - contact devtools@ for details. Suite build deps: [rUq2HS9VxmOn-HnxHtKAFw {'project_path': 'ydb/core/graph/shard/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/graph/ut, name=unittest] (uid=rnd-m5omx5lzxr7d0yku): Infrastructure error - contact devtools@ for details. Suite build deps: [2rymvZn8nhFTW4Sl4eGJuw {'project_path': 'ydb/core/graph/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/grpc_services/tablet/ut, name=unittest] (uid=rnd-yk5vzq837kre28e9): Infrastructure error - contact devtools@ for details. Suite build deps: [NLXiAOwvFjQLNhqXilY71A {'project_path': 'ydb/core/grpc_services/tablet/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/grpc_services/ut, name=unittest] (uid=rnd-vxe3g53e64sc5m8o): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [zFGyEvkiYtyHeenCtD9nPw {'project_path': 'ydb/core/grpc_services/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/grpc_streaming/ut, name=unittest] (uid=rnd-h6yphcgy8nzhtzxi): Infrastructure error - contact devtools@ for details. 
Suite build deps: [OzdoudPJ5WI8khB0kAavQQ {'project_path': 'ydb/core/grpc_streaming/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/health_check/ut, name=unittest] (uid=rnd-kh6wr3g1pc3mxyk8): Infrastructure error - contact devtools@ for details. Suite build deps: [FhhCZo0SwAU5qaV-H3FQcw {'project_path': 'ydb/core/health_check/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/http_proxy/ut, name=unittest] (uid=rnd-h7qvv99uh0nq9osd): Infrastructure error - contact devtools@ for details. Suite build deps: [08Qmtdy3roDQRRd_4XtP8w {'project_path': 'ydb/core/http_proxy/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/http_proxy/ut/inside_ydb_ut, name=unittest] (uid=rnd-o6e7br7a0qux0fnp): Infrastructure error - contact devtools@ for details. Suite build deps: [q3RBV_Z1N7rM2Vh8kTF57g {'project_path': 'ydb/core/http_proxy/ut/inside_ydb_ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kafka_proxy/ut, name=unittest] (uid=rnd-g2w3stctymfxc5zm): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [txptHr_iTnb1KjOVoG1-4A {'project_path': 'ydb/core/kafka_proxy/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/kesus/tablet/ut, name=unittest] (uid=rnd-egegkq3bp8m8uhsa): Infrastructure error - contact devtools@ for details. Suite build deps: [L_IrLYUP-Uzh7xavgUAwAg {'project_path': 'ydb/core/kesus/tablet/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/keyvalue/ut, name=unittest] (uid=rnd-2s309gcdhkvkhewx): Infrastructure error - contact devtools@ for details. Suite build deps: [q95q2QvBngTW3_thXIuq2w {'project_path': 'ydb/core/keyvalue/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/keyvalue/ut_trace, name=unittest] (uid=rnd-2vayida1eudeupzk): Infrastructure error - contact devtools@ for details. 
Suite build deps: [GLhZcjGUzLpSRycyVLpthQ {'project_path': 'ydb/core/keyvalue/ut_trace', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/proxy_service/ut, name=unittest] (uid=rnd-lq5bpuqcm8su1c8d): Infrastructure error - contact devtools@ for details. Suite build deps: [2-MS6xxFJwrOXHJpcTz-qA {'project_path': 'ydb/core/kqp/proxy_service/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/tests/kikimr_tpch, name=unittest] (uid=rnd-nd7agsijezlxjigp): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [KGoNEbTTIHSSfp8lA2hRmA {'project_path': 'yql/essentials/udfs/common/string', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [LY7YyY0X2K-Pu3gU2j8Vhw {'project_path': 'ydb/library/yql/udfs/common/datetime', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [c8D_-SD2Srz7qbMAzjrsOA {'project_path': 'yql/essentials/udfs/common/datetime2', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [kyKtgnZz-pAr_NCz6YPrMg {'project_path': 'ydb/core/kqp/tests/kikimr_tpch', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [ul_t95we-PSim33xXqRZAg {'project_path': 'yql/essentials/udfs/common/pire', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}], [y_l-_UH8k8vcNqfTjOTpTw {'project_path': 'yql/essentials/udfs/common/re2', 'platform': 'default-linux-x86_64-relwithdebinfo-pic', 'tags': ['default-linux-x86_64', 'relwithdebinfo', 'pic']}] Warn: Test [project=ydb/core/kqp/ut/batch_operations, name=unittest] (uid=rnd-864iju0w52b5q9ym): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [u8iz401A4_WTq8wlnw4GBg {'project_path': 'ydb/core/kqp/ut/batch_operations', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/kqp/ut/cost, name=unittest] (uid=rnd-6y3ae4cee9ejlm40): Infrastructure error - contact devtools@ for details. 
Suite build deps: [I9-qMhKVmMjldfaO63O9Hg {'project_path': 'ydb/core/kqp/ut/cost', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/data, name=unittest] (uid=rnd-mkqlue8l1247zupo): Infrastructure error - contact devtools@ for details. Suite build deps: [TKq6-vG4oLBi8XfODLIaKw {'project_path': 'ydb/core/kqp/ut/data', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/data_integrity, name=unittest] (uid=rnd-q1apcycgtu8fo8vi): Infrastructure error - contact devtools@ for details. Suite build deps: [cMJMnG8gdcf0WGS2VkAAmA {'project_path': 'ydb/core/kqp/ut/data_integrity', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/federated_query/generic_ut, name=unittest] (uid=rnd-69n98bn293pwu56z): Infrastructure error - contact devtools@ for details. Suite build deps: [Ergvqe2N5MYd6t4lfM3coQ {'project_path': 'ydb/core/kqp/ut/federated_query/generic_ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/federated_query/s3, name=unittest] (uid=rnd-x6f70ux3n8etajfl): Infrastructure error - contact devtools@ for details. Suite build deps: [-SsLDoMlrPe028IXd7n06w {'project_path': 'ydb/tests/tools/s3_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [WBaOymL41lGZ_vqq_p-kaA {'project_path': 'ydb/core/kqp/ut/federated_query/s3', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yxNWThklOGHiAPqLldgcvQ {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/kqp/ut/idx_test, name=unittest] (uid=rnd-mzv1g7bv7ctzwavx): Infrastructure error - contact devtools@ for details. Suite build deps: [ej78KEpFKy2JALBkpM9-sQ {'project_path': 'ydb/core/kqp/ut/idx_test', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/indexes, name=unittest] (uid=rnd-jdvoiu9p2iuljwgo): Infrastructure error - contact devtools@ for details. 
Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [u9Sab47po9EhvFf4R7niBA {'project_path': 'ydb/core/kqp/ut/indexes', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/kqp/ut/join, name=unittest] (uid=rnd-bqsn3xfshqgvt63v): Infrastructure error - contact devtools@ for details. Suite build deps: [Lu60nPaZtluxn2Gl7XnugA {'project_path': 'ydb/core/kqp/ut/join', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/olap, name=unittest] (uid=rnd-z7vqr7dfn81tso99): Infrastructure error - contact devtools@ for details. Suite build deps: [G75Mol3eu1QtVyoNo98H-A {'project_path': 'ydb/core/kqp/ut/olap', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/opt, name=unittest] (uid=rnd-gwxwk6uwka1h22dr): Infrastructure error - contact devtools@ for details. Suite build deps: [c6lRt6jQ_xMQ9wPl28RPMw {'project_path': 'ydb/core/kqp/ut/opt', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/perf, name=unittest] (uid=rnd-vur7a2qzedc196s4): Infrastructure error - contact devtools@ for details. Suite build deps: [Wr2nGSPzxvspueD8sHSdkw {'project_path': 'ydb/core/kqp/ut/perf', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/pg, name=unittest] (uid=rnd-u8k0fj3tvn16mk0m): Infrastructure error - contact devtools@ for details. Suite build deps: [pYxvZma5GJODj5ZUNpjkYg {'project_path': 'ydb/core/kqp/ut/pg', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/query, name=unittest] (uid=rnd-ifvbgeapf4948rgu): Infrastructure error - contact devtools@ for details. Suite build deps: [TesBczaM5360T80Jg2899w {'project_path': 'ydb/core/kqp/ut/query', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/scan, name=unittest] (uid=rnd-13pg3ilao6a9c3k3): Infrastructure error - contact devtools@ for details. 
Suite build deps: [MbhEKB4VGXPsy5BVZUxyMw {'project_path': 'ydb/core/kqp/ut/scan', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/scheme, name=unittest] (uid=rnd-g5v1uk707f5is8j9): Infrastructure error - contact devtools@ for details. Suite build deps: [1yjRMwM31Gbu3iV5ChgQjg {'project_path': 'ydb/core/kqp/ut/scheme', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/service, name=unittest] (uid=rnd-62dy8phlww5x26vh): Infrastructure error - contact devtools@ for details. Suite build deps: [mgTu4PFjkrO96uDH0jMZeA {'project_path': 'ydb/core/kqp/ut/service', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/sysview, name=unittest] (uid=rnd-8t7cdcgenln8p50z): Infrastructure error - contact devtools@ for details. Suite build deps: [pjpgpmo9q5T1dPKn0ztzig {'project_path': 'ydb/core/kqp/ut/sysview', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/tx, name=unittest] (uid=rnd-e836cv8ullor4a0l): Infrastructure error - contact devtools@ for details. Suite build deps: [-OsCQsf2eQFop1Zi7bA29A {'project_path': 'ydb/core/kqp/ut/tx', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/view, name=unittest] (uid=rnd-tqbmwv8hvr1dl63u): Infrastructure error - contact devtools@ for details. Suite build deps: [sOm74sP75q8XjSGGaAF-yA {'project_path': 'ydb/core/kqp/ut/view', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/kqp/ut/yql, name=unittest] (uid=rnd-w5re9rnzzs886tas): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [ujp-4rKk5woqn6GJcJJzJA {'project_path': 'ydb/core/kqp/ut/yql', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/kqp/workload_service/ut, name=unittest] (uid=rnd-2sainyu61gf11ols): Infrastructure error - contact devtools@ for details. 
Suite build deps: [QsPQx9IALj-WNhBDmPdy0g {'project_path': 'ydb/core/kqp/workload_service/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/load_test/ut, name=unittest] (uid=rnd-nbt7dw4pl1suv18h): Infrastructure error - contact devtools@ for details. Suite build deps: [gm-Irv9cVUpEvn81_t5dDw {'project_path': 'ydb/core/load_test/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/memory_controller/ut, name=unittest] (uid=rnd-4boz6y2dmnwyhumr): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [v7itusf_XPOVYg5o79X_WA {'project_path': 'ydb/core/memory_controller/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/mind/bscontroller/ut, name=unittest] (uid=rnd-d6t0sfgjcefxbboi): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [zL4sG_duvRRQcK32Fw6Lpg {'project_path': 'ydb/core/mind/bscontroller/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/mind/bscontroller/ut_bscontroller, name=unittest] (uid=rnd-5n5ugu4tod30u9co): Infrastructure error - contact devtools@ for details. Suite build deps: [tBuitbCwk8UPAwxsaueQ6g {'project_path': 'ydb/core/mind/bscontroller/ut_bscontroller', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/mind/hive/ut, name=unittest] (uid=rnd-hu22pqs4ui4wajr7): Infrastructure error - contact devtools@ for details. Suite build deps: [NLRTO0rsKVRDiXnoRvyV3g {'project_path': 'ydb/core/mind/hive/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/mind/ut, name=unittest] (uid=rnd-2joomn18lopw46tz): Infrastructure error - contact devtools@ for details. Suite build deps: [WKBzuMCGKD4s0m-K7PJ0Sw {'project_path': 'ydb/core/mind/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/mind/ut_fat, name=unittest] (uid=rnd-lgmuf4vmkqw6uklx): Infrastructure error - contact devtools@ for details. 
Suite build deps: [X3EBP3Zyuv2ssdQ4C5k5qQ {'project_path': 'ydb/core/mind/ut_fat', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/persqueue/dread_cache_service/ut, name=unittest] (uid=rnd-s7yuvlgw9zfasc2y): Infrastructure error - contact devtools@ for details. Suite build deps: [6p7i9VjtxCMwd2wYsGfecA {'project_path': 'ydb/core/persqueue/dread_cache_service/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/persqueue/ut, name=unittest] (uid=rnd-jhfd99dklcylelgg): Infrastructure error - contact devtools@ for details. Suite build deps: [sLr5bBZ5XkrmGdnqGvWRJQ {'project_path': 'ydb/core/persqueue/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/persqueue/ut/slow, name=unittest] (uid=rnd-4l0tm7c4awidzqb2): Infrastructure error - contact devtools@ for details. Suite build deps: [Yv0igL7ImTNTs_IhNoidpg {'project_path': 'ydb/core/persqueue/ut/slow', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/persqueue/ut/ut_with_sdk, name=unittest] (uid=rnd-b2zdmk6qd5xo4eiu): Infrastructure error - contact devtools@ for details. Suite build deps: [MZjLa2SOihiT4unx6n0Q6Q {'project_path': 'ydb/core/persqueue/ut/ut_with_sdk', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/public_http/ut, name=unittest] (uid=rnd-aakzhwygfr9dnkz1): Infrastructure error - contact devtools@ for details. Suite build deps: [TSobaGMjJTH3HsIEXg9Lyg {'project_path': 'ydb/core/public_http/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/quoter/ut, name=unittest] (uid=rnd-fdf233z3q0pkl3ed): Infrastructure error - contact devtools@ for details. Suite build deps: [7WshYQdC3ZU4B7p-0ITn6Q {'project_path': 'ydb/core/quoter/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/scheme/ut, name=unittest] (uid=rnd-bd2h2vav8tg4vecq): Infrastructure error - contact devtools@ for details. 
Suite build deps: [SJE2ius14ptdmdzChVhbIA {'project_path': 'ydb/core/scheme/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/scheme/ut_pg, name=unittest] (uid=rnd-75biwc0xo1gu4tgz): Infrastructure error - contact devtools@ for details. Suite build deps: [9xAf20OQ0uDrKpMb8utTTQ {'project_path': 'ydb/core/scheme/ut_pg', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/security/certificate_check/ut, name=unittest] (uid=rnd-e4buvkii7ufp4r9x): Infrastructure error - contact devtools@ for details. Suite build deps: [sVcC2ZgcXnq9I_CGrZxDLg {'project_path': 'ydb/core/security/certificate_check/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/security/ldap_auth_provider/ut, name=unittest] (uid=rnd-lv5jka2zvjevvv6o): Infrastructure error - contact devtools@ for details. Suite build deps: [Hv1JZlqjDc041b9Fds31Ew {'project_path': 'ydb/core/security/ldap_auth_provider/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/security/ut, name=unittest] (uid=rnd-o17dfhbacix17cjp): Infrastructure error - contact devtools@ for details. Suite build deps: [6GkoBxvo7P1kraXEWRIdZg {'project_path': 'ydb/core/security/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/statistics/aggregator/ut, name=unittest] (uid=rnd-9x8wp4gchjv0ng6t): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [v-5D_M66ptDAlbLmHresTQ {'project_path': 'ydb/core/statistics/aggregator/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/statistics/database/ut, name=unittest] (uid=rnd-kq88kykrtvep5fe3): Infrastructure error - contact devtools@ for details. Suite build deps: [sFtxXbeKIxK8K6257P2hQA {'project_path': 'ydb/core/statistics/database/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/statistics/service/ut, name=unittest] (uid=rnd-orbfjn1n8ofoz5ko): Infrastructure error - contact devtools@ for details. 
Suite build deps: [IWdxmJZCdGkDkr_ZI-xCvA {'project_path': 'ydb/core/statistics/service/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/sys_view/partition_stats/ut, name=unittest] (uid=rnd-8sklg6xtc7g0bsxc): Infrastructure error - contact devtools@ for details. Suite build deps: [P9zBNXjdHM5-1DBW-sNRqw {'project_path': 'ydb/core/sys_view/partition_stats/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/sys_view/query_stats/ut, name=unittest] (uid=rnd-oi0gws3myqv0h0sp): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [tgcAXgrEHKZDXXwPskrVNA {'project_path': 'ydb/core/sys_view/query_stats/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/sys_view/service/ut, name=unittest] (uid=rnd-1kyuoe71d84i8h8z): Infrastructure error - contact devtools@ for details. Suite build deps: [MqfkTDqqrA3U2rOW8e0jKQ {'project_path': 'ydb/core/sys_view/service/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/sys_view/ut, name=unittest] (uid=rnd-bx32tn7t2cjfvq81): Infrastructure error - contact devtools@ for details. Suite build deps: [mEjkNLbaoT14oBk5AxsPZQ {'project_path': 'ydb/core/sys_view/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tablet/ut, name=unittest] (uid=rnd-mppx89hezdfdw06w): Infrastructure error - contact devtools@ for details. Suite build deps: [buJ5bNh17VNNRqZGU3thJQ {'project_path': 'ydb/core/tablet/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tablet_flat/ut, name=unittest] (uid=rnd-srensftv4x34m2yx): Infrastructure error - contact devtools@ for details. Suite build deps: [3HZqCWhVO25ALJI2dV6b7A {'project_path': 'ydb/core/tablet_flat/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tablet_flat/ut_pg, name=unittest] (uid=rnd-8sfpslnqzl32ontx): Infrastructure error - contact devtools@ for details. 
Suite build deps: [W22r3jZ-as-Nk6p3bPAEkg {'project_path': 'ydb/core/tablet_flat/ut_pg', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/testlib/actors/ut, name=unittest] (uid=rnd-kr85l2v83ruogh72): Infrastructure error - contact devtools@ for details. Suite build deps: [G8WjuvLEtvAL1glQA091Kg {'project_path': 'ydb/core/testlib/actors/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/transfer/ut/functional, name=unittest] (uid=rnd-63zrkf4v2hddazj5): Infrastructure error - contact devtools@ for details. Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [rdw5rWWFdFKg-tsmeSbxTg {'project_path': 'ydb/core/transfer/ut/functional', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/columnshard/engines/ut, name=unittest] (uid=rnd-695s7ngpeozpjcbe): Infrastructure error - contact devtools@ for details. Suite build deps: [ON4qEKgqIJRWRtbh_LVJfA {'project_path': 'ydb/core/tx/columnshard/engines/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/columnshard/splitter/ut, name=unittest] (uid=rnd-nzvzssqlmcrmxkdy): Infrastructure error - contact devtools@ for details. Suite build deps: [N3ride-DYMAavU9Q4Lp6pg {'project_path': 'ydb/core/tx/columnshard/splitter/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/columnshard/ut_rw, name=unittest] (uid=rnd-y3vcirtlb3hnu047): Infrastructure error - contact devtools@ for details. Suite build deps: [par7jHdUJ4hUTxhS3Im9Cg {'project_path': 'ydb/core/tx/columnshard/ut_rw', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/columnshard/ut_schema, name=unittest] (uid=rnd-hdog5bcl9o8nfgf7): Infrastructure error - contact devtools@ for details. 
Suite build deps: [qS4SjflbJzmN22yuGIaXTw {'project_path': 'ydb/core/tx/columnshard/ut_schema', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/conveyor_composite/ut, name=unittest] (uid=rnd-1dlaoge9swhne4rw): Infrastructure error - contact devtools@ for details. Suite build deps: [Ca3NbTFnqmkqjS-cbRHqiA {'project_path': 'ydb/core/tx/conveyor_composite/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/coordinator/ut, name=unittest] (uid=rnd-mftew17n40r9k530): Infrastructure error - contact devtools@ for details. Suite build deps: [PBfFcLbihSC7Z3drvv3Obw {'project_path': 'ydb/core/tx/coordinator/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/build_index/ut, name=unittest] (uid=rnd-ogt7s0gn1vagolhu): Infrastructure error - contact devtools@ for details. Suite build deps: [lJM5AKHTt6uQN1HDbh2SWg {'project_path': 'ydb/core/tx/datashard/build_index/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_background_compaction, name=unittest] (uid=rnd-lw3esm8xl0hpj28z): Infrastructure error - contact devtools@ for details. Suite build deps: [Ed7NXtR-NEnge9-ZR_5ZXA {'project_path': 'ydb/core/tx/datashard/ut_background_compaction', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_change_collector, name=unittest] (uid=rnd-34gbxuvhjoq03tif): Infrastructure error - contact devtools@ for details. Suite build deps: [TC0KRhm9ch8l0cmMqA0jQg {'project_path': 'ydb/core/tx/datashard/ut_change_collector', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_change_exchange, name=unittest] (uid=rnd-fummsem0rw3qt50g): Infrastructure error - contact devtools@ for details. Suite build deps: [lruW9tkVjtkcmASdB4eopw {'project_path': 'ydb/core/tx/datashard/ut_change_exchange', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_column_stats, name=unittest] (uid=rnd-srv7kvq32a7ofbv9): Infrastructure error - contact devtools@ for details. 
Suite build deps: [f2kzpwIwaXTLyTjfmFD04g {'project_path': 'ydb/core/tx/datashard/ut_column_stats', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_compaction, name=unittest] (uid=rnd-xf9qzkvskcldjfxv): Infrastructure error - contact devtools@ for details. Suite build deps: [IhluGo4Y7Srsf2l5LN4QTw {'project_path': 'ydb/core/tx/datashard/ut_compaction', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_data_cleanup, name=unittest] (uid=rnd-31vn9lzynp9hd9zu): Infrastructure error - contact devtools@ for details. Suite build deps: [DWeL4NHvnJmUjvmz6lQUzA {'project_path': 'ydb/core/tx/datashard/ut_data_cleanup', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_followers, name=unittest] (uid=rnd-8f5vph762hg4j552): Infrastructure error - contact devtools@ for details. Suite build deps: [50yQF4WOUsczRAPK0r81Ag {'project_path': 'ydb/core/tx/datashard/ut_followers', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_incremental_backup, name=unittest] (uid=rnd-jx819g55dfgswoan): Infrastructure error - contact devtools@ for details. Suite build deps: [j4DGOX_I5injMt6-8-GFrg {'project_path': 'ydb/core/tx/datashard/ut_incremental_backup', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_incremental_restore_scan, name=unittest] (uid=rnd-6it4ebt3u69frz8w): Infrastructure error - contact devtools@ for details. Suite build deps: [5uo23LUreec_Bo8E2rh9zA {'project_path': 'ydb/core/tx/datashard/ut_incremental_restore_scan', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_init, name=unittest] (uid=rnd-i45b9shfl63nxg83): Infrastructure error - contact devtools@ for details. Suite build deps: [bgY1N9GzExYWhF6LbyTJOA {'project_path': 'ydb/core/tx/datashard/ut_init', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_keys, name=unittest] (uid=rnd-6m1emj1w2s8myq2x): Infrastructure error - contact devtools@ for details. 
Suite build deps: [nwTOQkuyDzzCQe0qE0JWsw {'project_path': 'ydb/core/tx/datashard/ut_keys', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_kqp, name=unittest] (uid=rnd-hazks5deiebetdw8): Infrastructure error - contact devtools@ for details. Suite build deps: [Sy750bn8ZLzrzTtlYJkedQ {'project_path': 'ydb/core/tx/datashard/ut_kqp', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_kqp_errors, name=unittest] (uid=rnd-5itt2nw013gwgptf): Infrastructure error - contact devtools@ for details. Suite build deps: [LlhikqM7ZRcLpCzPuIy7sA {'project_path': 'ydb/core/tx/datashard/ut_kqp_errors', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_kqp_scan, name=unittest] (uid=rnd-avnskf6l3vdvh9uh): Infrastructure error - contact devtools@ for details. Suite build deps: [XDj0o_xYyMjtCEB0JXfKbg {'project_path': 'ydb/core/tx/datashard/ut_kqp_scan', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_locks, name=unittest] (uid=rnd-egh22x90c8m99918): Infrastructure error - contact devtools@ for details. Suite build deps: [519upamC0JWX4ounNEdz0g {'project_path': 'ydb/core/tx/datashard/ut_locks', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_minikql, name=unittest] (uid=rnd-ubr5tfx99ouctmoe): Infrastructure error - contact devtools@ for details. Suite build deps: [A1RR70SJbOh7a0q9LiP86Q {'project_path': 'ydb/core/tx/datashard/ut_minikql', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_minstep, name=unittest] (uid=rnd-sxr3e3gh1i9nwr6o): Infrastructure error - contact devtools@ for details. Suite build deps: [cxdHfFklzLCKeMCmY34Dow {'project_path': 'ydb/core/tx/datashard/ut_minstep', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_order, name=unittest] (uid=rnd-obtzucufka3jq3co): Infrastructure error - contact devtools@ for details. 
Suite build deps: [ec9J9B3ZX2pINXsofrbLsQ {'project_path': 'ydb/core/tx/datashard/ut_order', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_range_ops, name=unittest] (uid=rnd-tf2suya8xp7pd9fi): Infrastructure error - contact devtools@ for details. Suite build deps: [RQtHc1kM3LrPP3p1DpUdhQ {'project_path': 'ydb/core/tx/datashard/ut_range_ops', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_read_iterator, name=unittest] (uid=rnd-7n0ygzmpjs5qei7t): Infrastructure error - contact devtools@ for details. Suite build deps: [Ndqvt7YtvJoeIqBMbug9ig {'project_path': 'ydb/core/tx/datashard/ut_read_iterator', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_read_table, name=unittest] (uid=rnd-9tgfmdce11ylspcg): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [veHo4cKHGh7eLddjVtjEfQ {'project_path': 'ydb/core/tx/datashard/ut_read_table', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/datashard/ut_reassign, name=unittest] (uid=rnd-hegeha5cctslgcsj): Infrastructure error - contact devtools@ for details. Suite build deps: [JKBUEPdJcJeT7KWSTHoiDw {'project_path': 'ydb/core/tx/datashard/ut_reassign', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_replication, name=unittest] (uid=rnd-40bs5ab4lb7rzdlt): Infrastructure error - contact devtools@ for details. Suite build deps: [7dinraTGwxANCM1bJZQIJQ {'project_path': 'ydb/core/tx/datashard/ut_replication', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_rs, name=unittest] (uid=rnd-lum124i5nytiztex): Infrastructure error - contact devtools@ for details. Suite build deps: [ivhP2n_nr3e9ulp17Mxlxw {'project_path': 'ydb/core/tx/datashard/ut_rs', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_sequence, name=unittest] (uid=rnd-0o36qi5o1nybbu5y): Infrastructure error - contact devtools@ for details. 
Suite build deps: [ImJUf9ojLilH6j1iwTvmvQ {'project_path': 'ydb/core/tx/datashard/ut_sequence', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_snapshot, name=unittest] (uid=rnd-hgjp0xmfjk26wyrx): Infrastructure error - contact devtools@ for details. Suite build deps: [NBF8NsSdwnnuSAtwzZISuw {'project_path': 'ydb/core/tx/datashard/ut_snapshot', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_stats, name=unittest] (uid=rnd-qg22u8nlhstpe64u): Infrastructure error - contact devtools@ for details. Suite build deps: [EgqnKrK7j4xCBtafhDOKOg {'project_path': 'ydb/core/tx/datashard/ut_stats', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_trace, name=unittest] (uid=rnd-laj68mjm2u4tbpgj): Infrastructure error - contact devtools@ for details. Suite build deps: [1F-1ZfUGX1cZv0fC4ZppZg {'project_path': 'ydb/core/tx/datashard/ut_trace', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_upload_rows, name=unittest] (uid=rnd-qsy0wp2i0apfjtgt): Infrastructure error - contact devtools@ for details. Suite build deps: [P8ATl7cyii4QYvL92oWjrg {'project_path': 'ydb/core/tx/datashard/ut_upload_rows', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/datashard/ut_volatile, name=unittest] (uid=rnd-p4ik47t3qtcu91vs): Infrastructure error - contact devtools@ for details. Suite build deps: [B6gb-ZW92oDAxyqqvwoueg {'project_path': 'ydb/core/tx/datashard/ut_volatile', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/locks/ut_range_treap, name=unittest] (uid=rnd-w43v5h8osaaflco0): Infrastructure error - contact devtools@ for details. Suite build deps: [SN5PyQ6kxdIcqG6SKiYTJQ {'project_path': 'ydb/core/tx/locks/ut_range_treap', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/long_tx_service/ut, name=unittest] (uid=rnd-jpznfrh4wervpvnr): Infrastructure error - contact devtools@ for details. 
Suite build deps: [EvOmqMCCqjDjoQBXbB-Prw {'project_path': 'ydb/core/tx/long_tx_service/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/mediator/ut, name=unittest] (uid=rnd-hvp9lhlrgvpeozrf): Infrastructure error - contact devtools@ for details. Suite build deps: [JEDo5VmT0xCh8Orn1mMTeQ {'project_path': 'ydb/core/tx/mediator/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/replication/controller/ut_assign_tx_id, name=unittest] (uid=rnd-ybb4uwxrckt651nx): Infrastructure error - contact devtools@ for details. Suite build deps: [9fXG8TUUL10SxTIelIP9-g {'project_path': 'ydb/core/tx/replication/controller/ut_assign_tx_id', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/replication/controller/ut_dst_creator, name=unittest] (uid=rnd-ws060reljnou6a0z): Infrastructure error - contact devtools@ for details. Suite build deps: [36N5z1cLijEfUggFDJzQOQ {'project_path': 'ydb/core/tx/replication/controller/ut_dst_creator', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/replication/controller/ut_stream_creator, name=unittest] (uid=rnd-zp1qyyfzorsriuli): Infrastructure error - contact devtools@ for details. Suite build deps: [khIMP9i_PhgOyvtGr3Th4Q {'project_path': 'ydb/core/tx/replication/controller/ut_stream_creator', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/replication/controller/ut_target_discoverer, name=unittest] (uid=rnd-099djr3a0q5qnsrr): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [wWcoagIAUlSADTlOrDUL0g {'project_path': 'ydb/core/tx/replication/controller/ut_target_discoverer', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/replication/service/ut_topic_reader, name=unittest] (uid=rnd-aiug78hq43folxpl): Infrastructure error - contact devtools@ for details. Suite build deps: [tYkuucOsGZ7ucSywgW5ktA {'project_path': 'ydb/core/tx/replication/service/ut_topic_reader', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/replication/service/ut_worker, name=unittest] (uid=rnd-5ggp9livjmkjoeo7): Infrastructure error - contact devtools@ for details. 
Suite build deps: [tY2audAvTndyD6d6KGMruw {'project_path': 'ydb/core/tx/replication/service/ut_worker', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/replication/ydb_proxy/ut, name=unittest] (uid=rnd-rcw8o3izk3bwh0gr): Infrastructure error - contact devtools@ for details. Suite build deps: [XcMIKyGO2OBuD5sbqtiomQ {'project_path': 'ydb/core/tx/replication/ydb_proxy/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/scheme_board/ut_monitoring, name=unittest] (uid=rnd-j1j4fxp272t9l0pe): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [wiIrGU40aOrnVhLMl-pS_w {'project_path': 'ydb/core/tx/scheme_board/ut_monitoring', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/scheme_board/ut_populator, name=unittest] (uid=rnd-v9b4syk5lpnvx7hh): Infrastructure error - contact devtools@ for details. Suite build deps: [oDizRx8iwcYXbM6XP8ayzA {'project_path': 'ydb/core/tx/scheme_board/ut_populator', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/scheme_board/ut_replica, name=unittest] (uid=rnd-y7lwq9r79d7x9to9): Infrastructure error - contact devtools@ for details. Suite build deps: [J50PjVu03eHTShfkopU6oA {'project_path': 'ydb/core/tx/scheme_board/ut_replica', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/scheme_board/ut_subscriber, name=unittest] (uid=rnd-e2z1xsd4e2jzn5q1): Infrastructure error - contact devtools@ for details. Suite build deps: [ltT7N7yJ1-aKCE6In2Iazg {'project_path': 'ydb/core/tx/scheme_board/ut_subscriber', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_auditsettings, name=unittest] (uid=rnd-40j2k8lhglb8vzdm): Infrastructure error - contact devtools@ for details. Suite build deps: [GE3na-0fKQIlPVDAJVGMkg {'project_path': 'ydb/core/tx/schemeshard/ut_auditsettings', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_background_cleaning, name=unittest] (uid=rnd-9lq74rvwfh0wk5fh): Infrastructure error - contact devtools@ for details. 
Suite build deps: [3Eakc7LDropS8CoCLaMKSQ {'project_path': 'ydb/core/tx/schemeshard/ut_background_cleaning', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_backup, name=unittest] (uid=rnd-pu0uyt0fe80rr0p0): Infrastructure error - contact devtools@ for details. Suite build deps: [0ys0tEjpEjY_hSv2LOJ9Kg {'project_path': 'ydb/core/tx/schemeshard/ut_backup', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [t0xDYv0EVYq8grba50G1eA {'project_path': 'ydb/tests/supp', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_backup_collection, name=unittest] (uid=rnd-htxmevw1ic0ly2jj): Infrastructure error - contact devtools@ for details. Suite build deps: [AvtQhdmdYlgtZGQZAVzT3w {'project_path': 'ydb/core/tx/schemeshard/ut_backup_collection', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_backup_collection_reboots, name=unittest] (uid=rnd-uhvm62lqr81pwgli): Infrastructure error - contact devtools@ for details. Suite build deps: [kzavjkXh-MFkjSpAPaVh3Q {'project_path': 'ydb/core/tx/schemeshard/ut_backup_collection_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_base_reboots, name=unittest] (uid=rnd-lvbqo5xbu6b5oyz5): Infrastructure error - contact devtools@ for details. Suite build deps: [SWZfRP638apOusNQUNXsZA {'project_path': 'ydb/core/tx/schemeshard/ut_base_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_bsvolume, name=unittest] (uid=rnd-5k22pbxbm4d5jbin): Infrastructure error - contact devtools@ for details. Suite build deps: [hUA0jVe9T93yFODoAHhgTQ {'project_path': 'ydb/core/tx/schemeshard/ut_bsvolume', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_bsvolume_reboots, name=unittest] (uid=rnd-l32nd7mahxkjdxl9): Infrastructure error - contact devtools@ for details. 
Suite build deps: [OEptIvlE4AMzW3ME4rcJlg {'project_path': 'ydb/core/tx/schemeshard/ut_bsvolume_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_cdc_stream_reboots, name=unittest] (uid=rnd-sxrm270isk17paph): Infrastructure error - contact devtools@ for details. Suite build deps: [4mclijsbV24fn21BA5ig_Q {'project_path': 'ydb/core/tx/schemeshard/ut_cdc_stream_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_column_build, name=unittest] (uid=rnd-sx2vqq8dx9atmg88): Infrastructure error - contact devtools@ for details. Suite build deps: [jzFa8oOo-Y63jr-fWwHcig {'project_path': 'ydb/core/tx/schemeshard/ut_column_build', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_compaction, name=unittest] (uid=rnd-7nza00wb3bmh29bm): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [xZ4R-SuzXFIE8cuS3ZHhvw {'project_path': 'ydb/core/tx/schemeshard/ut_compaction', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/schemeshard/ut_continuous_backup, name=unittest] (uid=rnd-j5hytdx19wfi3ncf): Infrastructure error - contact devtools@ for details. Suite build deps: [JGY4ppX8h4GQxWeLOq6NHA {'project_path': 'ydb/core/tx/schemeshard/ut_continuous_backup', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_data_erasure, name=unittest] (uid=rnd-ldwuqibzs8vax37d): Infrastructure error - contact devtools@ for details. Suite build deps: [Szhbx92txwpgke0y3Cr74w {'project_path': 'ydb/core/tx/schemeshard/ut_data_erasure', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_export, name=unittest] (uid=rnd-1cludzkr6jbj3141): Infrastructure error - contact devtools@ for details. 
Suite build deps: [t0xDYv0EVYq8grba50G1eA {'project_path': 'ydb/tests/supp', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [zeB8321f8FND7U1nPLlocg {'project_path': 'ydb/core/tx/schemeshard/ut_export', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/schemeshard/ut_export_reboots_s3, name=unittest] (uid=rnd-nu55ey5pe52js60p): Infrastructure error - contact devtools@ for details. Suite build deps: [PfP4kRynpCXVBy3AxgYm2Q {'project_path': 'ydb/core/tx/schemeshard/ut_export_reboots_s3', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [t0xDYv0EVYq8grba50G1eA {'project_path': 'ydb/tests/supp', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_external_data_source, name=unittest] (uid=rnd-mh286hy5dirvxi5y): Infrastructure error - contact devtools@ for details. Suite build deps: [JCnTz_wWSoHX8QM7gaUMAQ {'project_path': 'ydb/core/tx/schemeshard/ut_external_data_source', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_external_data_source_reboots, name=unittest] (uid=rnd-mekns88lakhfti9q): Infrastructure error - contact devtools@ for details. Suite build deps: [GsLmeqBKPBYyaqlyn2yjKg {'project_path': 'ydb/core/tx/schemeshard/ut_external_data_source_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_extsubdomain_reboots, name=unittest] (uid=rnd-sjr8rr8rvufpb11d): Infrastructure error - contact devtools@ for details. Suite build deps: [5yMSufNBHN0EeYF1PW3tCA {'project_path': 'ydb/core/tx/schemeshard/ut_extsubdomain_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_filestore_reboots, name=unittest] (uid=rnd-ohcuvt9p77rqiyq1): Infrastructure error - contact devtools@ for details. Suite build deps: [4dZRzw23GQVfk4TxJfopTQ {'project_path': 'ydb/core/tx/schemeshard/ut_filestore_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_index, name=unittest] (uid=rnd-xrq02qejuu0g6l4i): Infrastructure error - contact devtools@ for details. 
Suite build deps: [-rPfpTI_cH4zwmci8HkU1Q {'project_path': 'ydb/core/tx/schemeshard/ut_index', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_index_build, name=unittest] (uid=rnd-qx1hwuu34nme6nvj): Infrastructure error - contact devtools@ for details. Suite build deps: [oTMK5IgibclWDngtc9e-sw {'project_path': 'ydb/core/tx/schemeshard/ut_index_build', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_index_build_reboots, name=unittest] (uid=rnd-0e23prs62kcct3zl): Infrastructure error - contact devtools@ for details. Suite build deps: [gg8rKprErnVW5MAVsYnEPA {'project_path': 'ydb/core/tx/schemeshard/ut_index_build_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_login, name=unittest] (uid=rnd-1x3l0gxh4m5x2fag): Infrastructure error - contact devtools@ for details. Suite build deps: [NRCeknv-3NWhnA0kIyQnGQ {'project_path': 'ydb/core/tx/schemeshard/ut_login', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_olap, name=unittest] (uid=rnd-dt37h2ds221rvnx0): Infrastructure error - contact devtools@ for details. Suite build deps: [NhLfhubTZwPp8bhfrcsUNw {'project_path': 'ydb/core/tx/schemeshard/ut_olap', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_olap_reboots, name=unittest] (uid=rnd-syc38l9cbs0zzcsa): Infrastructure error - contact devtools@ for details. Suite build deps: [EWQJuIGxXj1iTl9IX_Zu4Q {'project_path': 'ydb/core/tx/schemeshard/ut_olap_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_reboots, name=unittest] (uid=rnd-uureorzonjbrxry7): Infrastructure error - contact devtools@ for details. Suite build deps: [XndnUI2CeZtJfdSH-WTRoA {'project_path': 'ydb/core/tx/schemeshard/ut_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_replication_reboots, name=unittest] (uid=rnd-wf68vwkllr3tu4ae): Infrastructure error - contact devtools@ for details. 
Suite build deps: [oJxrsnJwfQMhvWFRB_Uu_w {'project_path': 'ydb/core/tx/schemeshard/ut_replication_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_restore, name=unittest] (uid=rnd-vd890798n8vdfuar): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [zNKcMcb370I9vTxeOwL4Vw {'project_path': 'ydb/core/tx/schemeshard/ut_restore', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/schemeshard/ut_rtmr_reboots, name=unittest] (uid=rnd-sv1erbg1oapctci9): Infrastructure error - contact devtools@ for details. Suite build deps: [Rnfs9OlphDORXVoneM9SZw {'project_path': 'ydb/core/tx/schemeshard/ut_rtmr_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_ru_calculator, name=unittest] (uid=rnd-hbwcu7pvc25tftwg): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [xQ6D19y9SFyKQjQBXhJ4Og {'project_path': 'ydb/core/tx/schemeshard/ut_ru_calculator', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/schemeshard/ut_sequence_reboots, name=unittest] (uid=rnd-evbria6ahj8dik7k): Infrastructure error - contact devtools@ for details. Suite build deps: [SA9-FQUmIFGJJt2nPruaTw {'project_path': 'ydb/core/tx/schemeshard/ut_sequence_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_serverless, name=unittest] (uid=rnd-sqw4ouyffg2atlv4): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [z50Wv4Z0hw8LWnZvUpXdXQ {'project_path': 'ydb/core/tx/schemeshard/ut_serverless', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/schemeshard/ut_serverless_reboots, name=unittest] (uid=rnd-th8398et79tz35p6): Infrastructure error - contact devtools@ for details. Suite build deps: [j7UT-xTEk1toC7jTk9NnyA {'project_path': 'ydb/core/tx/schemeshard/ut_serverless_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_split_merge, name=unittest] (uid=rnd-1ndgzg3rjukzxuws): Infrastructure error - contact devtools@ for details. 
Suite build deps: [pWpOOgzr7vNh0C2pqhCN9g {'project_path': 'ydb/core/tx/schemeshard/ut_split_merge', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_stats, name=unittest] (uid=rnd-og4ymun3f701rqgi): Infrastructure error - contact devtools@ for details. Suite build deps: [UjfbYPhvupZZXKnogTtLMQ {'project_path': 'ydb/core/tx/schemeshard/ut_stats', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_subdomain, name=unittest] (uid=rnd-w7xg7bbe12z6awj5): Infrastructure error - contact devtools@ for details. Suite build deps: [63sGusIRsBQjwW_QYNYxSg {'project_path': 'ydb/core/tx/schemeshard/ut_subdomain', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_subdomain_reboots, name=unittest] (uid=rnd-qwpihyp97sp3ym1q): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [u6zC-QIIb6t8lO7jX-KlSA {'project_path': 'ydb/core/tx/schemeshard/ut_subdomain_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/schemeshard/ut_transfer, name=unittest] (uid=rnd-e5pnu0kpjpwlu3ev): Infrastructure error - contact devtools@ for details. Suite build deps: [BFfnGzkWuTUqaIcwxcH_-g {'project_path': 'ydb/core/tx/schemeshard/ut_transfer', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_ttl, name=unittest] (uid=rnd-v8yxpii7b4fysg9i): Infrastructure error - contact devtools@ for details. Suite build deps: [f8_mqTfljKrvVR8ODe1_tA {'project_path': 'ydb/core/tx/schemeshard/ut_ttl', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_user_attributes, name=unittest] (uid=rnd-a888zimu6q4ry2jw): Infrastructure error - contact devtools@ for details. Suite build deps: [e_1BL7iFcTBOM1YC_wi6jA {'project_path': 'ydb/core/tx/schemeshard/ut_user_attributes', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_user_attributes_reboots, name=unittest] (uid=rnd-0auw8c8jscinpbpo): Infrastructure error - contact devtools@ for details. 
Suite build deps: [-RVPJdQnZHVG3sKm1wTD3g {'project_path': 'ydb/core/tx/schemeshard/ut_user_attributes_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/schemeshard/ut_vector_index_build_reboots, name=unittest] (uid=rnd-9ucm1s678kyd49h1): Infrastructure error - contact devtools@ for details. Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [vmP5hs82bgSg38rKRU4Ecg {'project_path': 'ydb/core/tx/schemeshard/ut_vector_index_build_reboots', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}] Warn: Test [project=ydb/core/tx/sequenceproxy/ut, name=unittest] (uid=rnd-xc307u2tpt3jvxgy): Infrastructure error - contact devtools@ for details. Suite build deps: [VXbVZkE8vzCDiSp5bLPEQA {'project_path': 'ydb/core/tx/sequenceproxy/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/sequenceshard/ut, name=unittest] (uid=rnd-8lczexwqd79ofn1z): Infrastructure error - contact devtools@ for details. Suite build deps: [OxuZAQI5n2iljYgURntxWg {'project_path': 'ydb/core/tx/sequenceshard/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/sharding/ut, name=unittest] (uid=rnd-a0ujj67gypaupke2): Infrastructure error - contact devtools@ for details. Suite build deps: [GHjrklGcEP8dOejwMGAM5A {'project_path': 'ydb/core/tx/sharding/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/tiering/ut, name=unittest] (uid=rnd-2vx5kd3rtm1uoor6): Infrastructure error - contact devtools@ for details. Suite build deps: [NJ3iv-y6EnkgPm3_lpwlkA {'project_path': 'ydb/core/tx/tiering/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/time_cast/ut, name=unittest] (uid=rnd-0tuj2oskdrspvokj): Infrastructure error - contact devtools@ for details. Suite build deps: [McDPw-B6zjlGlKm3uuTyaw {'project_path': 'ydb/core/tx/time_cast/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/tx_allocator/ut, name=unittest] (uid=rnd-zoev3r1oop53kwzd): Infrastructure error - contact devtools@ for details. 
Suite build deps: [dPbnuMfYwVqVGra2l7XMZQ {'project_path': 'ydb/core/tx/tx_allocator/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/tx_allocator_client/ut, name=unittest] (uid=rnd-6ul0ujcf6597wb0q): Infrastructure error - contact devtools@ for details. Suite build deps: [MYhfEGsg5gouIRpenNMdbA {'project_path': 'ydb/core/tx/tx_allocator_client/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/tx_proxy/ut_base_tenant, name=unittest] (uid=rnd-87yux90univz2w0q): Infrastructure error - contact devtools@ for details. Suite build deps: [qKfJWVW3nQ24vyyAGLRCSA {'project_path': 'ydb/core/tx/tx_proxy/ut_base_tenant', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/tx_proxy/ut_encrypted_storage, name=unittest] (uid=rnd-2ztarj7405k213c3): Infrastructure error - contact devtools@ for details. Suite build deps: [JiqpXH62y_WsSLjLfsssSQ {'project_path': 'ydb/core/tx/tx_proxy/ut_encrypted_storage', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/tx_proxy/ut_ext_tenant, name=unittest] (uid=rnd-f1tnn5nxp6d9s259): Infrastructure error - contact devtools@ for details. Suite build deps: [9SJjOu_RsH4Zn9eolhULlQ {'project_path': 'ydb/core/tx/tx_proxy/ut_ext_tenant', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/tx_proxy/ut_schemereq, name=unittest] (uid=rnd-na8006llcfr4cr0l): Infrastructure error - contact devtools@ for details. Suite build deps: [FBmaJZcTAEPgDPTCG9Lrvg {'project_path': 'ydb/core/tx/tx_proxy/ut_schemereq', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/tx/tx_proxy/ut_storage_tenant, name=unittest] (uid=rnd-okry7lny9vibssem): Infrastructure error - contact devtools@ for details. Suite build deps: [rz4FgeLqO8fIs8a5-nC5VA {'project_path': 'ydb/core/tx/tx_proxy/ut_storage_tenant', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/util/ut, name=unittest] (uid=rnd-20gxnut2quzlfrk6): Infrastructure error - contact devtools@ for details. 
Suite build deps: [P1vzN8NEfkB2Bq-4zNhaXg {'project_path': 'ydb/core/util/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/ymq/actor/ut, name=unittest] (uid=rnd-zwhlzynh6bakjl9j): Infrastructure error - contact devtools@ for details. Suite build deps: [UyWieU-1nC3erjvV98QpTQ {'project_path': 'ydb/core/ymq/actor/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/ymq/actor/yc_search_ut, name=unittest] (uid=rnd-bvyz0gq9cpfj95rt): Infrastructure error - contact devtools@ for details. Suite build deps: [QaMtdRhs0Sziq3KM1cISTg {'project_path': 'ydb/core/ymq/actor/yc_search_ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/ymq/base/ut, name=unittest] (uid=rnd-p2dghhg5j55vtwgj): Infrastructure error - contact devtools@ for details. Suite build deps: [kFt49FXjpRkBmIfDX0gqxQ {'project_path': 'ydb/core/ymq/base/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/ymq/http/ut, name=unittest] (uid=rnd-7u39vq3ny3x4f89o): Infrastructure error - contact devtools@ for details. Suite build deps: [4PVUS965Tp7y_5ZTxzR3YA {'project_path': 'ydb/core/ymq/http/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/core/ymq/ut, name=unittest] (uid=rnd-setdcf6hs7r5nv0h): Infrastructure error - contact devtools@ for details. Suite build deps: [Z5DdgjeHGeYd0vstQaui8g {'project_path': 'ydb/core/ymq/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/actors/testlib/ut, name=unittest] (uid=rnd-8ch90qyew5dbm87d): Infrastructure error - contact devtools@ for details. Suite build deps: [aCRyOlL-MSdrRe0gXL4u6Q {'project_path': 'ydb/library/actors/testlib/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}] Warn: Test [project=ydb/library/ncloud/impl/ut, name=unittest] (uid=rnd-24q2d1bjpitw9ge3): Infrastructure error - contact devtools@ for details. 
Suite build deps: [qgXNEjkGo2HOM0JdkH_pCQ {'project_path': 'ydb/library/ncloud/impl/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/library/persqueue/topic_parser/ut, name=unittest] (uid=rnd-l8et8m22k7b32qis): Infrastructure error - contact devtools@ for details.
Suite build deps: [S6c6lP3QWNRN1QZGCFL4ig {'project_path': 'ydb/library/persqueue/topic_parser/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/library/query_actor/ut, name=unittest] (uid=rnd-uo51hgfzpee65msw): Infrastructure error - contact devtools@ for details.
Suite build deps: [AaQnu9EuYJsOy_oOcUJ3WQ {'project_path': 'ydb/library/query_actor/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/library/yql/dq/runtime/ut, name=unittest] (uid=rnd-dzd220kb90ru5zky): Infrastructure error - contact devtools@ for details.
Suite build deps: [Bk4cC-AQDuya-rWwNGIzNg {'project_path': 'ydb/library/yql/dq/runtime/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/library/yql/providers/generic/actors/ut, name=unittest] (uid=rnd-zcaqwzt1pw2il4dy): Infrastructure error - contact devtools@ for details.
Suite build deps: [pWT3HdnD4yHriNCf5NTrcA {'project_path': 'ydb/library/yql/providers/generic/actors/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/library/yql/providers/solomon/actors/ut, name=unittest] (uid=rnd-nxnwgcubgm4b7jot): Infrastructure error - contact devtools@ for details.
Suite build deps: [D63QH9kun45aOk-RnCe5jQ {'project_path': 'ydb/library/yql/providers/solomon/actors/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [afbAP8BqVD2NZxbfhk0uJQ {'project_path': 'ydb/library/yql/tools/solomon_emulator/recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [jWrmtcwzfDvZhQhFDy429w {'project_path': 'ydb/library/yql/tools/solomon_emulator/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/public/lib/ydb_cli/topic/ut, name=unittest] (uid=rnd-01pt012su1rl2ben): Infrastructure error - contact devtools@ for details.
Suite build deps: [ZAgnxQHt0PuJ03jhrYtcVg {'project_path': 'ydb/public/lib/ydb_cli/topic/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/public/sdk/cpp/src/client/federated_topic/ut, name=unittest] (uid=rnd-1fvdiwv79t196zqs): Infrastructure error - contact devtools@ for details.
Suite build deps: [eOSLsMbxYu5a3-J_HHRzsg {'project_path': 'ydb/public/sdk/cpp/src/client/federated_topic/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/public/sdk/cpp/src/client/persqueue_public/ut, name=unittest] (uid=rnd-ugbttstp2tdjrcae): Infrastructure error - contact devtools@ for details.
Suite build deps: [ZSiCdByYvq5Th8ph8AnP-A {'project_path': 'ydb/public/sdk/cpp/src/client/persqueue_public/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut, name=unittest] (uid=rnd-477fa0ptaztpxo0c): Infrastructure error - contact devtools@ for details.
Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yM0a8IXO30dXcEabBviJKg {'project_path': 'ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}]
Warn: Test [project=ydb/public/sdk/cpp/src/client/topic/ut, name=unittest] (uid=rnd-9cv10cmxue8sktfc): Infrastructure error - contact devtools@ for details.
Suite build deps: [08cwdfUF3kFwjZxDtP9p-w {'project_path': 'ydb/public/sdk/cpp/src/client/topic/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/services/cms/ut, name=unittest] (uid=rnd-c7k98939uty2ub4k): Infrastructure error - contact devtools@ for details.
Suite build deps: [ZQ1ZZp4vufNrU-ZWHmVHuw {'project_path': 'ydb/services/cms/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/services/config/ut, name=unittest] (uid=rnd-pkia3l0eijjxgnu5): Infrastructure error - contact devtools@ for details.
Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [zwBQVwes4uEpGo1raUPKQw {'project_path': 'ydb/services/config/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}]
Warn: Test [project=ydb/services/datastreams/ut, name=unittest] (uid=rnd-eppy0tpqajydvpa8): Infrastructure error - contact devtools@ for details.
Suite build deps: [XD-EWQA6rJWof-NuiWxAig {'project_path': 'ydb/services/datastreams/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/services/dynamic_config/ut, name=unittest] (uid=rnd-puwic6ina40pm3xm): Infrastructure error - contact devtools@ for details.
Suite build deps: [VEcNCqVK8BMUwI665sxRXQ {'project_path': 'ydb/services/dynamic_config/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/services/ext_index/ut, name=unittest] (uid=rnd-9fxflagfz8cxuvuw): Infrastructure error - contact devtools@ for details.
Suite build deps: [j2ckFwW--OFNSmyvzjlDvA {'project_path': 'ydb/services/ext_index/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/services/fq/ut_integration, name=unittest] (uid=rnd-i7qe94dw3szwbug0): Infrastructure error - contact devtools@ for details.
Suite build deps: [KEkQj5TTvL7v6XKWnuRaSw {'project_path': 'ydb/services/fq/ut_integration', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/services/keyvalue/ut, name=unittest] (uid=rnd-74k7dfbzfhw2erz4): Infrastructure error - contact devtools@ for details.
Suite build deps: [ZP4SCJEfSJ-ZBQZ9ApQiKQ {'project_path': 'ydb/services/keyvalue/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/services/metadata/secret/ut, name=unittest] (uid=rnd-97aytdwa1f8vhsbw): Infrastructure error - contact devtools@ for details.
Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [xXLWmAvaYQ2Ks3quFre-Xg {'project_path': 'ydb/services/metadata/secret/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}]
Warn: Test [project=ydb/services/persqueue_cluster_discovery/ut, name=unittest] (uid=rnd-m8fxxq6uk4jgkxst): Infrastructure error - contact devtools@ for details.
Suite build deps: [q6KCN12AScvsCakB4TzYIA {'project_path': 'ydb/services/persqueue_cluster_discovery/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/services/persqueue_v1/ut, name=unittest] (uid=rnd-mpd3yw4rmz360fp8): Infrastructure error - contact devtools@ for details.
Suite build deps: [eYQllrdolYztQbIZVGcxlw {'project_path': 'ydb/services/persqueue_v1/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/services/rate_limiter/ut, name=unittest] (uid=rnd-rqrzgmr5p9h5bgna): Infrastructure error - contact devtools@ for details.
Suite build deps: [KMt1c0Ov4yE8eo3Nzkc8KA {'project_path': 'ydb/services/rate_limiter/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/services/ydb/sdk_sessions_pool_ut, name=unittest] (uid=rnd-9p3pzf83icmbl3cn): Infrastructure error - contact devtools@ for details.
Suite build deps: [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [w_Bb3qwIt964VG8dnKn8qQ {'project_path': 'ydb/services/ydb/sdk_sessions_pool_ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}]
Warn: Test [project=ydb/services/ydb/sdk_sessions_ut, name=unittest] (uid=rnd-87pc6mdyd84t8kq4): Infrastructure error - contact devtools@ for details.
Suite build deps: [VBOtjlTn-GFJuiMxgpYFOg {'project_path': 'ydb/services/ydb/sdk_sessions_ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/services/ydb/table_split_ut, name=unittest] (uid=rnd-6vhrakkh79h0nxw1): Infrastructure error - contact devtools@ for details.
Suite build deps: [dq72y74lBIFUL7wjYveIJg {'project_path': 'ydb/services/ydb/table_split_ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/services/ydb/ut, name=unittest] (uid=rnd-ro6yweei469pdip9): Infrastructure error - contact devtools@ for details.
Suite build deps: [nmQ8_29ho_-p4UsKoNSzvQ {'project_path': 'ydb/services/ydb/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/tests/fq/control_plane_storage, name=unittest] (uid=rnd-8pomcflgzmmfbl3l): Infrastructure error - contact devtools@ for details.
Suite build deps: [-XaYuhY7RxKBSx1l4RWsgg {'project_path': 'ydb/tests/fq/control_plane_storage', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/tests/fq/pq_async_io/ut, name=unittest] (uid=rnd-i3xc9z9m9j6se6fd): Infrastructure error - contact devtools@ for details.
Suite build deps: [5nGc59DZ85okwuCWyBkpiQ {'project_path': 'ydb/tests/fq/pq_async_io/ut', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/tests/functional/backup, name=unittest] (uid=rnd-mhv38fut4bqdq1ab): Infrastructure error - contact devtools@ for details.
Suite build deps: [-SsLDoMlrPe028IXd7n06w {'project_path': 'ydb/tests/tools/s3_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [bN7W2WVMYCpBrFLEGLNGxg {'project_path': 'ydb/tests/functional/backup', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yxNWThklOGHiAPqLldgcvQ {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}]
Warn: Test [project=ydb/tests/functional/backup/s3_path_style, name=unittest] (uid=rnd-t6odmuzm5hh4thow): Infrastructure error - contact devtools@ for details.
Suite build deps: [-SsLDoMlrPe028IXd7n06w {'project_path': 'ydb/tests/tools/s3_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [Y5b83d3WjzfTqw7l7fRKRg {'project_path': 'ydb/tests/functional/backup/s3_path_style', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}], [yxNWThklOGHiAPqLldgcvQ {'project_path': 'contrib/python/moto/bin', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}]
Warn: Test [project=ydb/tests/functional/kqp/kqp_indexes, name=unittest] (uid=rnd-wsm3hu509qwhxc8f): Infrastructure error - contact devtools@ for details.
Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [_bp4Z1PK-crRkq9qIbFrTA {'project_path': 'ydb/tests/functional/kqp/kqp_indexes', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/tests/functional/kqp/kqp_query_session, name=unittest] (uid=rnd-de4ih0282getbu71): Infrastructure error - contact devtools@ for details.
Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ds7GTAYkisroQnMrU3yjcg {'project_path': 'ydb/tests/functional/kqp/kqp_query_session', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/tests/functional/kqp/kqp_query_svc, name=unittest] (uid=rnd-fl2gb5s5xn9p4et1): Infrastructure error - contact devtools@ for details.
Suite build deps: [0S_vq78qwiwZu77-vrePzw {'project_path': 'ydb/tests/functional/kqp/kqp_query_svc', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/tests/functional/replication, name=unittest] (uid=rnd-n6afb2t2r6fp009s): Infrastructure error - contact devtools@ for details.
Suite build deps: [2WsX5QEvKs50Pv6ufzFAgA {'project_path': 'ydb/tests/functional/replication', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Warn: Test [project=ydb/tests/functional/sdk/cpp/sdk_credprovider, name=unittest] (uid=rnd-darux0mmc1gpl39q): Infrastructure error - contact devtools@ for details.
Suite build deps: [BO8FPWPGjB4FXwd7tZyedQ {'project_path': 'ydb/apps/ydbd', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [U5s94rOjx9N8XJe89PMEWA {'project_path': 'ydb/tests/functional/sdk/cpp/sdk_credprovider', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [ZU7EAKzN9lXtL0fdzIW-ZA {'project_path': 'ydb/public/tools/ydb_recipe', 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': ['default-linux-x86_64', 'relwithdebinfo']}], [test-ctx-gen-16070572007185876608 {'project_path': None, 'platform': 'default-linux-x86_64-relwithdebinfo', 'tags': []}]
Info: Dump results report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json
Failed
+ echo 1
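All of the warnings above share the same "Infrastructure error" format, so the affected suites can be listed in bulk from a saved copy of this console output. A minimal sketch, assuming the log has been saved to a file (the name build.log below is hypothetical and not produced by this run):

# Illustrative only: list each test project that was skipped with an infrastructure error.
grep -o 'Warn: Test \[project=[^,]*, name=unittest\] (uid=[^)]*): Infrastructure error' build.log \
  | sed 's/^Warn: Test \[project=//; s/,.*//' \
  | sort -u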